scala.collection.immutable.SortedSet Scala Examples

The following examples show how to use scala.collection.immutable.SortedSet. Each example is drawn from an open-source project; the header above each one names the source file, the project it comes from, and its license.
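Before diving into the project examples, here is a minimal, self-contained sketch (written for this page, not taken from any of the projects below) of the SortedSet operations they rely on: construction via an implicit Ordering, range queries, and supplying a custom Ordering explicitly.

import scala.collection.immutable.SortedSet

// Standalone sketch; the object name and sample values are illustrative only.
object SortedSetBasics extends App {
  // Construction uses the implicit Ordering[Int]; duplicates are dropped.
  val numbers: SortedSet[Int] = SortedSet(3, 1, 2, 2)
  println(numbers.toList)             // List(1, 2, 3) -- iteration follows the ordering

  // Range queries are available because the elements are kept sorted.
  println(numbers.range(1, 3).toList) // List(1, 2)

  // A custom Ordering can also be passed explicitly, as the SurvivalDecider example below does.
  val byLength: Ordering[String] = Ordering.by(_.length)
  val words = SortedSet.empty(byLength) ++ Seq("set", "sorted", "xs")
  println(words.toList)               // List(xs, set, sorted)
}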
Example 1
Source File: package.scala    From pureconfig   with Mozilla Public License 2.0
package pureconfig.module

import _root_.cats.data._
import _root_.cats.kernel.Order
import _root_.cats.{ Alternative, Foldable }
import _root_.cats.implicits._
import pureconfig.{ ConfigReader, ConfigWriter, Exported }

import scala.collection.immutable.{ SortedMap, SortedSet }
import scala.language.higherKinds
import scala.reflect.ClassTag


package object cats {

  private[pureconfig] def fromNonEmpty[A, B](reader: ConfigReader[A])(fromX: A => Option[B])(implicit ct: ClassTag[A]): ConfigReader[B] =
    reader.emap(x => fromX(x).toRight(EmptyTraversableFound(ct.toString)))

  implicit def nonEmptyListReader[A](implicit reader: ConfigReader[List[A]]): ConfigReader[NonEmptyList[A]] =
    fromNonEmpty(reader)(NonEmptyList.fromList)
  implicit def nonEmptyListWriter[A](implicit writer: ConfigWriter[List[A]]): ConfigWriter[NonEmptyList[A]] =
    writer.contramap(_.toList)

  implicit def nonEmptyVectorReader[A](implicit reader: ConfigReader[Vector[A]]): ConfigReader[NonEmptyVector[A]] =
    fromNonEmpty(reader)(NonEmptyVector.fromVector)
  implicit def nonEmptyVectorWriter[A](implicit writer: ConfigWriter[Vector[A]]): ConfigWriter[NonEmptyVector[A]] =
    writer.contramap(_.toVector)

  implicit def nonEmptySetReader[A](implicit reader: ConfigReader[SortedSet[A]]): ConfigReader[NonEmptySet[A]] =
    fromNonEmpty(reader)(NonEmptySet.fromSet)
  implicit def nonEmptySetWriter[A](implicit writer: ConfigWriter[SortedSet[A]]): ConfigWriter[NonEmptySet[A]] =
    writer.contramap(_.toSortedSet)

  implicit def nonEmptyMapReader[A, B](implicit reader: ConfigReader[Map[A, B]], ord: Order[A]): ConfigReader[NonEmptyMap[A, B]] =
    fromNonEmpty(reader)(x => NonEmptyMap.fromMap(SortedMap(x.toSeq: _*)(ord.toOrdering)))
  implicit def nonEmptyMapWriter[A, B](implicit writer: ConfigWriter[Map[A, B]]): ConfigWriter[NonEmptyMap[A, B]] =
    writer.contramap(_.toSortedMap)

  // For emptiable foldables not covered by TraversableOnce reader/writer, e.g. Chain.
  implicit def lowPriorityNonReducibleReader[A, F[_]: Foldable: Alternative](implicit reader: ConfigReader[List[A]]): Exported[ConfigReader[F[A]]] =
    Exported(reader.map(to => (to foldRight Alternative[F].empty[A])(_.pure[F] <+> _)))
  implicit def lowPriorityNonReducibleWriter[A, F[_]: Foldable: Alternative](implicit writer: ConfigWriter[List[A]]): Exported[ConfigWriter[F[A]]] =
    Exported(writer.contramap(_.toList))

  implicit def nonEmptyChainReader[A](implicit reader: ConfigReader[Chain[A]]): ConfigReader[NonEmptyChain[A]] =
    fromNonEmpty(reader)(NonEmptyChain.fromChain)
  implicit def nonEmptyChainWriter[A](implicit writer: ConfigWriter[Chain[A]]): ConfigWriter[NonEmptyChain[A]] =
    writer.contramap(_.toChain)
} 
Example 2
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io._
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._

import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._

import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  import EffOptics._

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  "file scan" ! {
    val base = Directory("base")
    val base1 = File(s"${base.path}/1.txt")
    val base2 = File(s"${base.path}/2.txt")
    val subdir = Directory(s"${base.path}/subdir")
    val sub1 = File(s"${subdir.path}/1.txt")
    val sub3 = File(s"${subdir.path}/3.txt")
    val fs: Filesystem = MockFilesystem(
      Map(
        base -> List(subdir, base1, base2),
        subdir -> List(sub1, sub3)
      ),
      Map(base1 -> 1, base2 -> 2, sub1 -> 1, sub3 -> 3)
    )

    val program = Scanner.pathScan[Scanner.R](base)
    val actual = program.runReader(AppConfig(ScanConfig(2), fs)).runAsync.runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }
} 
Example 3
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io._
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._

import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._

import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  "file scan" ! {
    val base = Directory("base")
    val base1 = File(s"${base.path}/1.txt")
    val base2 = File(s"${base.path}/2.txt")
    val subdir = Directory(s"${base.path}/subdir")
    val sub1 = File(s"${subdir.path}/1.txt")
    val sub3 = File(s"${subdir.path}/3.txt")
    val fs: Filesystem = MockFilesystem(
      Map(
        base -> List(subdir, base1, base2),
        subdir -> List(sub1, sub3)
      ),
      Map(base1 -> 1, base2 -> 2, sub1 -> 1, sub3 -> 3)
    )

    val program = Scanner.pathScan[Scanner.R](base)
    val actual = program.runReader(ScanConfig(2)).runReader(fs).runAsync.runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }
} 
Example 4
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    expectedLogs.forall(logs.contains)
  }
} 
Example 5
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val linkTarget = File(s"/somewhere/else/7.txt")
  val base1 = File(s"${base.path}/1.txt")
  val baseLink = Symlink(s"${base.path}/7.txt", linkTarget)
  val subdir = Directory(s"${base.path}/subdir")
  val sub2 = File(s"${subdir.path}/2.txt")
  val subLink = Symlink(s"${subdir.path}/7.txt", linkTarget)
  val directories = Map(
    base -> List(subdir, base1, baseLink),
    subdir -> List(sub2, subLink)
  )
  val fileSizes = Map(base1 -> 1L, sub2 -> 2L, linkTarget -> 7L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx5[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?], State[Set[FilePath], ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).evalStateZero[Set[FilePath]].taskAttempt.runWriter[Log].runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(linkTarget, 7), FileSize(sub2, 2)), 10, 3))

  val (actual, logs) = run(Scanner.pathScan[R](base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

} 
Example 6
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    logs.forall(expectedLogs.contains)
  }
} 
Example 7
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx3[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).runAsync.attempt.runSyncUnsafe(3.seconds)

  "file scan" ! {
    val actual = run(Scanner.pathScan(base), fs)
    val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))

    actual.mustEqual(expected)
  }

  "Error from Filesystem" ! {
    val emptyFs: Filesystem = MockFilesystem(directories, Map.empty)

    val actual = runE(Scanner.scanReport(Array("base", "10")), emptyFs)
    val expected = ???

    actual.mustEqual(expected)
  }

  type E = Fx.fx3[Task, Reader[Filesystem, ?], Either[String, ?]]
  def runE[T](program: Eff[E, T], fs: Filesystem) =
    //there are two nested Either in the stack, one from Exceptions and one from errors raised by the program
    //we convert to a common error type String then flatten
    program.runReader(fs).runEither.runAsync.attempt.runSyncUnsafe(3.seconds).leftMap(_.toString).flatten

  "Error - Report with non-numeric input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "not a number")), fs)
    val expected = Left("Number of files must be numeric: not a number")

    actual.mustEqual(expected)
  }

  "Error - Report with non-positive input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "-1")), fs)
    val expected = Left("Invalid number of files -1")

    actual.mustEqual(expected)
  }
} 
Example 8
Source File: PartitionsToSegments.scala    From kafka-journal   with MIT License
package com.evolutiongaming.kafka.journal.replicator

import cats.Monad
import cats.data.{NonEmptySet => Nes}
import cats.implicits._
import com.evolutiongaming.catshelper.DataHelper._
import com.evolutiongaming.kafka.journal.eventual.cassandra.{SegmentNr, Segments}
import com.evolutiongaming.kafka.journal.util.Fail
import com.evolutiongaming.skafka.Partition

import scala.collection.immutable.SortedSet


trait PartitionsToSegments {

  def apply(partitions: Nes[Partition]): SortedSet[SegmentNr]
}

object PartitionsToSegments {

  def of[F[_] : Monad : Fail](
    partitions: Int,
    segments: Segments = Segments.default
  ): F[PartitionsToSegments] = {

    (SegmentNr.min.value until segments.value.toLong)
      .toList
      .traverse { segment => SegmentNr.of[F](segment) }
      .map { segmentNrs =>
        val filter = {
          if (partitions >= segments.value) {
            (a: Partition, b: SegmentNr) => a.value % segments.value.toLong === b.value
          } else {
            (a: Partition, b: SegmentNr) => b.value % partitions === a.value.toLong
          }
        }

        partitions: Nes[Partition] => {
          for {
            partition <- partitions.toSortedSet
            segmentNr <- segmentNrs.toSortedSet
            if filter(partition, segmentNr)
          } yield segmentNr
        }
      }
  }
} 
Example 9
Source File: PartitionsToSegmentsTest.scala    From kafka-journal   with MIT License
package com.evolutiongaming.kafka.journal.replicator

import cats.data.{NonEmptySet => Nes}
import cats.implicits._
import com.evolutiongaming.kafka.journal.eventual.cassandra.{SegmentNr, Segments}
import com.evolutiongaming.catshelper.DataHelper._
import com.evolutiongaming.skafka.Partition
import org.scalatest.Succeeded
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.collection.immutable.SortedSet
import scala.util.Try

class PartitionsToSegmentsTest extends AnyFunSuite with Matchers {
  for {
    (partitions, partitionNrs, segmentNrs) <- List(
      (20,  Nes.of(0),       SortedSet(0, 20, 40, 60, 80)),
      (30,  Nes.of(0),       SortedSet(0, 30, 60, 90)),
      (100, Nes.of(0, 1, 2), SortedSet(0, 1, 2)),
      (1,   Nes.of(0),       (0 until 100).toSortedSet))
  } {
    test(s"partitions: $partitions, segmentNrs: $segmentNrs, partitionNrs: $partitionNrs") {
      val result = for {
        segmentNrs           <- segmentNrs.toList.traverse { a => SegmentNr.of[Try](a.toLong) }
        partitionNrs         <- partitionNrs.toNel.traverse { a => Partition.of[Try](a) }
        partitionsToSegments <- PartitionsToSegments.of[Try](partitions, Segments.default)
      } yield {
        val actual = partitionsToSegments(partitionNrs.toNes)
        actual shouldEqual segmentNrs.toSortedSet
      }
      result shouldEqual Succeeded.pure[Try]
    }
  }
} 
Example 10
Source File: QuerySpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.rdf

import cats.kernel.Eq
import cats.syntax.show._
import ch.epfl.bluebrain.nexus.rdf.Iri._

import scala.collection.immutable.{SortedMap, SortedSet}

class QuerySpec extends RdfSpec {

  "A Query" should {
    "be constructed successfully" in {
      // format: off
      val cases = List(
        "" -> SortedMap.empty[String, SortedSet[String]],
        "a=b&a=b&a=c&a&b&b&b=c&d/&e?" -> SortedMap(
          "a" -> SortedSet("", "b", "c"),
          "b" -> SortedSet("", "c"),
          "d/" -> SortedSet(""),
          "e?" -> SortedSet("")),
        "%3D%26=%3D%26&%3D&%26" -> SortedMap(
          "=&" -> SortedSet("=&"),
          "="  -> SortedSet(""),
          "&"  -> SortedSet("")),
        "%C2%A3=%C3%86" -> SortedMap("£" -> SortedSet("Æ"))
      )
      // format: on
      forAll(cases) {
        case (raw, map) =>
          Query(raw).rightValue.value shouldEqual map
      }
    }
    "fail to parse" in {
      val cases = List("a==b", "a=b&", "a#", "a&&", "a=&b")
      forAll(cases) { str => Query(str).leftValue }
    }
    "show" in {
      val encodedDelim = urlEncode("[]#")
      Query("a=b&a=b&a=c&a&b&b&b=c&d&e" + encodedDelim).rightValue.show shouldEqual "a&a=b&a=c&b&b=c&d&e" + encodedDelim
    }
    "pct encoded representation" in {
      val utf8         = "£Æ"
      val encodedUtf8  = urlEncode(utf8)
      val allowedDelim = "!:@!$()*,"
      val encodedDelim = urlEncode("[]#")
      Query(
        utf8 + encodedUtf8 + allowedDelim + encodedDelim
      ).rightValue.pctEncoded shouldEqual (encodedUtf8 + encodedUtf8 + allowedDelim + encodedDelim)
    }
    "eq" in {
      val lhs = Query("a=b&a=b&a=c&a&b&b&b=c&d&e").rightValue
      val rhs = Query("a=b&a=b&a=c&a&b&b=c&d&e").rightValue
      Eq.eqv(lhs, rhs) shouldEqual true
    }
  }
} 
Example 11
Source File: RecordDomainCache.scala    From apalache   with Apache License 2.0
package at.forsyte.apalache.tla.bmcmt.caches

import at.forsyte.apalache.tla.bmcmt.implicitConversions._
import at.forsyte.apalache.tla.bmcmt.types.{ConstT, FinSetT}
import at.forsyte.apalache.tla.bmcmt.{Arena, ArenaCell, SolverContext}
import at.forsyte.apalache.tla.lir.convenience.tla

import scala.collection.immutable.SortedSet


  override def create(context: Arena, usedAndUnusedKeys: (SortedSet[String], SortedSet[String])): (Arena, ArenaCell) = {
    val usedKeys = usedAndUnusedKeys._1
    val unusedKeys = usedAndUnusedKeys._2
    val allKeys: SortedSet[String] = usedKeys.union(unusedKeys)
    var arena = context

    def strToCell(str: String): ArenaCell = {
      val (newArena, cell) = strValueCache.getOrCreate(arena, str)
      arena = newArena
      cell
    }

    val allCells = allKeys.toList map strToCell
    // create the domain cell
    arena = arena.appendCell(FinSetT(ConstT()))
    val set = arena.topCell
    arena = arena.appendHas(set, allCells: _*)
    // force that every key in the usedKeys is in the set, whereas every key in the unusedKeys is outside of the set
    for ((cell, key) <- allCells.zip(allKeys)) {
      val cond =
        if (usedKeys.contains(key)) {
          tla.in(cell, set)
        } else {
          tla.not(tla.in(cell, set))
        }

      solverContext.assertGroundExpr(cond)
    }
    (arena, set)
  }
} 
Example 12
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.PrintWriter
import java.nio.file._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  "Report Format" ! {
    val base = deletedOnExit(Files.createTempDirectory("exerciseTask"))
    val base1 = deletedOnExit(fillFile(base, 1))
    val base2 = deletedOnExit(fillFile(base, 2))
    val subdir = deletedOnExit(Files.createTempDirectory(base, "subdir"))
    val sub1 = deletedOnExit(fillFile(subdir, 1))
    val sub3 = deletedOnExit(fillFile(subdir, 3))

    val actual = Scanner.pathScan(FilePath(base), 2).runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }

  def fillFile(dir: Path, size: Int) = {
    val path = dir.resolve(s"$size.txt")
    val w = new PrintWriter(path.toFile)
    try w.write("a" * size)
    finally w.close
    path
  }

  def deletedOnExit(p: Path) = {
    p.toFile.deleteOnExit()
    p
  }

} 
Example 13
Source File: CatsSuite.scala    From pureconfig   with Mozilla Public License 2.0
package pureconfig.module.cats

import scala.collection.immutable.{ SortedMap, SortedSet }

import cats.data._
import cats.implicits._
import com.typesafe.config.ConfigFactory.parseString
import pureconfig.BaseSuite
import pureconfig.generic.auto._
import pureconfig.syntax._

class CatsSuite extends BaseSuite {

  case class Numbers(numbers: NonEmptyList[Int])
  case class NumVec(numbers: NonEmptyVector[Int])
  case class NumSet(numbers: NonEmptySet[Int])
  case class NumMap(numbers: NonEmptyMap[String, Int])
  case class NumChain(numbers: NonEmptyChain[Int])

  checkReadWrite[Numbers](parseString(s"""{ numbers: [1,2,3] }""").root() -> Numbers(NonEmptyList(1, List(2, 3))))
  checkReadWrite[NumVec](parseString(s"""{ numbers: [1,2,3] }""").root() -> NumVec(NonEmptyVector(1, Vector(2, 3))))
  checkReadWrite[NumSet](parseString(s"""{ numbers: [1,2,3] }""").root() -> NumSet(NonEmptySet(1, SortedSet(2, 3))))
  checkReadWrite[NumMap](parseString(s"""{
                                           numbers {"1": 1, "2": 2, "3": 3 }
                                         }""").root() -> NumMap(NonEmptyMap(("1", 1), SortedMap("2" -> 2, "3" -> 3))))
  checkReadWrite[NumChain](parseString(s"""{ numbers: [1,2,3] }""").root() -> NumChain(NonEmptyChain(1, 2, 3)))

  it should "return an EmptyTraversableFound when reading empty lists into NonEmptyList" in {
    val config = parseString("{ numbers: [] }")
    config.to[Numbers] should failWith(EmptyTraversableFound("scala.collection.immutable.List"), "numbers", stringConfigOrigin(1))
  }

  it should "return an EmptyTraversableFound when reading empty vector into NonEmptyVector" in {
    val config = parseString("{ numbers: [] }")
    config.to[NumVec] should failWith(EmptyTraversableFound("scala.collection.immutable.Vector"), "numbers", stringConfigOrigin(1))
  }

  it should "return an EmptyTraversableFound when reading empty set into NonEmptySet" in {
    val config = parseString("{ numbers: [] }")
    config.to[NumSet] should failWith(EmptyTraversableFound("scala.collection.immutable.SortedSet"), "numbers", stringConfigOrigin(1))
  }

  it should "return an EmptyTraversableFound when reading empty map into NonEmptyMap" in {
    val config = parseString("{ numbers{} }")
    config.to[NumMap] should failWith(EmptyTraversableFound("scala.collection.immutable.Map"), "numbers", stringConfigOrigin(1))
  }

  it should "return an EmptyTraversableFound when reading empty chain into NonEmptyChain" in {
    val config = parseString("{ numbers: [] }")
    config.to[NumChain] should failWith(EmptyTraversableFound("cats.data.Chain"), "numbers", stringConfigOrigin(1))
  }
} 
Example 14
Source File: XSet.scala    From topshell   with MIT License
package com.github.ahnfelt.topshell.data

import scala.collection.immutable.{SortedSet, TreeMap, TreeSet}
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSExport, JSExportTopLevel}

@JSExportTopLevel("XSet")
object XSet {

    private implicit val ordering : Ordering[Any] = XOrder.ordering

    type XSet = SortedSet[Any]
    type Fun[A, B] = js.Function1[A, B]

    @JSExport
    def isInstance(any : Any) : Boolean = any.isInstanceOf[scala.collection.immutable.SortedSet[_]]

    @JSExport
    def of(array : js.Array[Any]) : XSet = {
        TreeSet(array : _*)
    }

    @JSExport
    def toList(set : XSet) : js.Array[Any] = {
        js.Array(set.toList : _*)
    }

    @JSExport
    def toMap(f : Fun[Any, Any], set : XSet) : XMap.XMap = {
        set.foldLeft(XMap.empty) ( (m, v) => m + (v -> f(v)) )
    }

    @JSExport
    def add(value : Any, set : XSet) : XSet = {
        set + value
    }

    @JSExport
    def remove(value : Any, set : XSet) : XSet = {
        set - value
    }

    @JSExport
    def union(a : XSet, b : XSet) : XSet = {
        a ++ b
    }

    @JSExport
    def intersect(a : XSet, b : XSet) : XSet = {
        a.intersect(b)
    }

    @JSExport
    def exclude(a : XSet, b : XSet) : XSet = {
        b -- a
    }

    @JSExport
    def has(value : Any, set : XSet) : Boolean = {
        set.contains(value)
    }

    @JSExport
    def from(value : Any, set : XSet) : XSet = {
        set.from(value)
    }

    @JSExport
    def until(value : Any, set : XSet) : XSet = {
        set.until(value)
    }

    @JSExport
    def foldLeft(f : Fun[Any, Fun[Any, Any]], z : Any, set : XSet) : Any = {
        set.foldLeft(z) { (v, p) => f(p)(v) }
    }

    @JSExport
    def foldRight(f : Fun[Any, Fun[Any, Any]], z : Any, set : XSet) : Any = {
        set.foldRight(z) { (p, v) => f(p)(v) }
    }

    @JSExport
    def size(set : XSet) : Int = {
        set.size
    }

    @JSExport
    def isEmpty(set : XSet) : Boolean = {
        set.isEmpty
    }

    @JSExport
    val empty : XSet = TreeSet.empty

} 
Example 15
Source File: SurvivalDecider.scala    From simple-akka-downing   with Apache License 2.0
package com.ajjpj.simpleakkadowning

import akka.actor.Address
import akka.cluster.{Member, UniqueAddress}
import com.ajjpj.simpleakkadowning.SurvivalDecider.ClusterState
import com.typesafe.config.Config

import scala.collection.Set
import scala.collection.immutable.SortedSet


trait SurvivalDecider {
  def isInMinority(clusterState: ClusterState, selfAddress: Address): Boolean
}

object SurvivalDecider {
  private val memberOrdering = new Ordering[ClusterMemberInfo] {
    override def compare (x: ClusterMemberInfo, y: ClusterMemberInfo) =
      Member.addressOrdering.compare(x.uniqueAddress.address, y.uniqueAddress.address)
  }

  case class ClusterMemberInfo(uniqueAddress: UniqueAddress, roles: Set[String], member: Member)
  case class ClusterState(upMembers: Set[ClusterMemberInfo], unreachable: Set[UniqueAddress]) {
    lazy val sortedUpMembers = SortedSet.empty(memberOrdering) ++  upMembers
    lazy val sortedUpAndReachable = sortedUpMembers.filterNot (x => unreachable.contains(x.uniqueAddress))
    lazy val upReachable = upMembers.filterNot(x => unreachable(x.uniqueAddress))
    lazy val upUnreachable = upMembers.filter(x => unreachable(x.uniqueAddress))
  }


  def apply(config: Config): SurvivalDecider = {
    val cc = config.getConfig("simple-akka-downing")

    cc.getString("active-strategy") match {
      case "static-quorum" =>
        val ccc = cc.getConfig("static-quorum")
        val quorumSize = ccc.getInt("quorum-size")
        val role = ccc.getString("role") match {
          case r if r.trim.isEmpty => None
          case r => Some(r)
        }
        new FixedQuorumDecider(quorumSize, role)
      case "keep-majority" =>
        val ccc = cc.getConfig("keep-majority")
        val role = ccc.getString("role") match {
          case r if r.trim.isEmpty => None
          case r => Some(r)
        }
        new KeepMajorityDecider(role)
      case "keep-oldest" =>
        val ccc = cc.getConfig("keep-oldest")
        val downIfAlone = ccc.getBoolean("down-if-alone")
        new KeepOldestDecider(downIfAlone)
    }
  }


  class FixedQuorumDecider(quorumSize: Int, role: Option[String]) extends SurvivalDecider {
    override def isInMinority(clusterState: ClusterState, selfAddress: Address) = {
      val relevantMembers = role match {
        case Some (r) => clusterState.upMembers.filter (_.roles contains r)
        case None =>     clusterState.upMembers
      }

      (relevantMembers -- clusterState.upUnreachable).size < quorumSize
    }
  }

  class KeepMajorityDecider(role: Option[String]) extends SurvivalDecider {
    override def isInMinority (clusterState: ClusterState, selfAddress: Address) = {
      role match {
        case Some(r) =>
          val all = clusterState.upMembers.filter(_.roles contains r)
          val unreachable = clusterState.upUnreachable.filter(_.roles contains r)
          all.size <= 2*unreachable.size
        case None =>
          clusterState.upMembers.size <= 2*clusterState.upUnreachable.size
      }
    }
  }

  class KeepOldestDecider(downIfAlone: Boolean) extends SurvivalDecider {
    override def isInMinority (clusterState: ClusterState, selfAddress: Address) = {
      val allRelevant = clusterState.upMembers
      val oldestRelevant = allRelevant.foldLeft(allRelevant.head)((a, b) => if (a.member isOlderThan b.member) a else b)

      if (downIfAlone) {
        clusterState.upReachable match {
          case s if s == Set(oldestRelevant) => true                                       // only the oldest node --> terminate
          case _ if clusterState.unreachable == Set(oldestRelevant.uniqueAddress) => false // the oldest node is the only unreachable node --> survive
          case _ => clusterState.unreachable contains oldestRelevant.uniqueAddress
        }
      }
      else {
        clusterState.unreachable contains oldestRelevant.uniqueAddress
      }
    }
  }
} 
Example 16
Source File: TapirCodecCats.scala    From tapir   with Apache License 2.0
package sttp.tapir.codec.cats

import cats.data.{NonEmptyChain, NonEmptyList, NonEmptySet}
import sttp.tapir.{Schema, SchemaType}
import sttp.tapir._

import scala.collection.immutable.SortedSet

trait TapirCodecCats {

  private def nonEmpty[T, C[X] <: Iterable[X]]: Validator.Primitive[C[T]] = Validator.minSize[T, C](1)

  private def iterableAndNonEmpty[T, C[X] <: Iterable[X]](implicit v: Validator[T]): Validator[C[T]] =
    v.asIterableElements[C].and(nonEmpty)

  implicit def validatorNel[T: Validator]: Validator[NonEmptyList[T]] =
    iterableAndNonEmpty[T, List].contramap(_.toList)

  implicit def validatorNec[T: Validator]: Validator[NonEmptyChain[T]] =
    iterableAndNonEmpty[T, List].contramap(_.toChain.toList)

  implicit def validatorNes[T: Validator]: Validator[NonEmptySet[T]] =
    iterableAndNonEmpty[T, Set].contramap(_.toSortedSet)

  implicit def schemaForNel[T: Schema]: Schema[NonEmptyList[T]] =
    Schema[NonEmptyList[T]](SchemaType.SArray(implicitly[Schema[T]])).copy(isOptional = false)

  implicit def schemaForNec[T: Schema]: Schema[NonEmptyChain[T]] =
    Schema[NonEmptyChain[T]](SchemaType.SArray(implicitly[Schema[T]])).copy(isOptional = false)

  implicit def schemaForNes[T: Schema]: Schema[NonEmptySet[T]] =
    Schema[NonEmptySet[T]](SchemaType.SArray(implicitly[Schema[T]])).copy(isOptional = false)

  implicit def codecForNonEmptyList[L, H, CF <: CodecFormat](implicit c: Codec[L, List[H], CF]): Codec[L, NonEmptyList[H], CF] =
    c.modifySchema(_.copy(isOptional = false))
      .validate(nonEmpty)
      .mapDecode { l => DecodeResult.fromOption(NonEmptyList.fromList(l)) }(_.toList)

  implicit def codecForNonEmptyChain[L, H, CF <: CodecFormat](implicit c: Codec[L, List[H], CF]): Codec[L, NonEmptyChain[H], CF] =
    c.modifySchema(_.copy(isOptional = false))
      .validate(nonEmpty)
      .mapDecode { l => DecodeResult.fromOption(NonEmptyChain.fromSeq(l)) }(_.toNonEmptyList.toList)

  implicit def codecForNonEmptySet[L, H: Ordering, CF <: CodecFormat](implicit c: Codec[L, Set[H], CF]): Codec[L, NonEmptySet[H], CF] =
    c.modifySchema(_.copy(isOptional = false))
      .validate(nonEmpty)
      .mapDecode { set => DecodeResult.fromOption(NonEmptySet.fromSet(SortedSet(set.toSeq: _*))) }(_.toSortedSet)
} 
Example 17
Source File: utils.scala    From lithium   with Apache License 2.0
package com.swissborg.lithium

import akka.cluster.swissborg.EitherValues
import cats.data.{NonEmptyList, NonEmptySet}
import org.scalacheck.Arbitrary
import org.scalacheck.Gen._

import scala.collection.immutable.SortedSet

package object utils extends EitherValues {

  
  def splitIn[A](parts: Int, as: NonEmptySet[A]): Arbitrary[NonEmptyList[NonEmptySet[A]]] =
    Arbitrary {
      if (parts <= 1 || parts > as.length) const(NonEmptyList.of(as))
      else {
        for {
          takeN <- chooseNum(1, as.length - parts + 1) // leave enough `as` to have at least 1 element per part
          newSet = as.toSortedSet.take(takeN.toInt)
          newSets <- splitIn(parts - 1, // parts > takeN
                             NonEmptySet.fromSetUnsafe(as.toSortedSet -- newSet)).arbitrary
        } yield NonEmptySet.fromSetUnsafe(newSet) :: newSets
      }
    }

  def pickNonEmptySubset[A: Ordering](as: NonEmptySet[A]): Arbitrary[NonEmptySet[A]] = Arbitrary {
    atLeastOne(as.toSortedSet).map(seq => NonEmptySet.fromSetUnsafe(SortedSet(seq.toSeq: _*)))
  }
} 
Example 18
Source File: KeepRefereeSuite.scala    From lithium   with Apache License 2.0
package com.swissborg.lithium

package strategy

import akka.actor.Address
import akka.cluster.ClusterEvent.CurrentClusterState
import akka.cluster.MemberStatus.Up
import akka.cluster.swissborg.TestMember
import cats.Id
import org.scalatest.wordspec.AnyWordSpecLike

import scala.collection.immutable.SortedSet
import org.scalatest.matchers.should.Matchers

class KeepRefereeSuite extends AnyWordSpecLike with Matchers {
  private val aa = TestMember(Address("akka", "sys", "a", 2552), Up)
  private val bb = TestMember(Address("akka", "sys", "b", 2552), Up)
  private val cc = TestMember(Address("akka", "sys", "c", 2552), Up)

  private val referee = aa.address.toString

  "KeepReferee" must {
    "down the unreachable nodes when being the referee node and reaching enough nodes" in {
      val w = WorldView.fromSnapshot(aa, CurrentClusterState(SortedSet(aa, bb, cc), Set(bb), seenBy = Set.empty))

      new KeepReferee[Id](KeepRefereeConfig(referee, 1)).takeDecision(w) should ===(
        Decision.DownUnreachable(w)
      )
    }

    "down the reachable nodes when being the referee and not reaching enough nodes" in {
      val w = WorldView.fromSnapshot(aa, CurrentClusterState(SortedSet(aa, bb, cc), Set(bb), seenBy = Set.empty))

      new strategy.KeepReferee[Id](KeepRefereeConfig(referee, 3)).takeDecision(w) should ===(
        Decision.DownReachable(w)
      )
    }

    "down the unreachable nodes when the referee is reachable and reaching enough nodes" in {
      val w = WorldView.fromSnapshot(cc, CurrentClusterState(SortedSet(aa, bb, cc), Set(bb), seenBy = Set.empty))

      new strategy.KeepReferee[Id](KeepRefereeConfig(referee, 1)).takeDecision(w) should ===(
        Decision.DownUnreachable(w)
      )
    }

    "down the reachable nodes when the referee is reachable and not reaching enough nodes" in {
      val w = WorldView.fromSnapshot(cc, CurrentClusterState(SortedSet(aa, bb, cc), Set(bb), seenBy = Set.empty))

      new strategy.KeepReferee[Id](KeepRefereeConfig(referee, 3)).takeDecision(w) should ===(
        Decision.DownReachable(w)
      )
    }

    "down the reachable nodes when the referee is unreachable" in {
      val w = WorldView.fromSnapshot(bb, CurrentClusterState(SortedSet(aa, bb, cc), Set(aa), seenBy = Set.empty))

      new strategy.KeepReferee[Id](KeepRefereeConfig(referee, 1)).takeDecision(w) should ===(
        Decision.DownReachable(w)
      )

      new strategy.KeepReferee[Id](KeepRefereeConfig(referee, 3)).takeDecision(w) should ===(
        Decision.DownReachable(w)
      )
    }
  }
} 
Example 19
Source File: DecisionSpec.scala    From lithium   with Apache License 2.0
package com.swissborg.lithium

package strategy

import cats.Monoid
import cats.implicits._

import scala.collection.immutable.SortedSet

class DecisionSpec extends LithiumSpec {
  "StrategyDecision" must {
    "extract the correct nodes from the world view" in {
      forAll { worldView: WorldView =>
        Decision.DownReachable(worldView).nodesToDown should ===(worldView.reachableNodes)
        Decision.DownUnreachable(worldView).nodesToDown should ===(worldView.unreachableNodes)

        Decision.DownIndirectlyConnected(worldView).nodesToDown should
          ===(worldView.indirectlyConnectedNodes)
      }
    }

    "extract the correct nodes from the decision" in {
      forAll { strategyDecision: Decision =>
        strategyDecision match {
          case Decision.DownReachable(nodesToDown) =>
            strategyDecision.nodesToDown should ===(nodesToDown.map(identity[Node]))

          case Decision.DownUnreachable(nodesToDown) =>
            strategyDecision.nodesToDown should ===(nodesToDown.map(identity[Node]))

          case Decision.DownThese(decision1, decision2) =>
            strategyDecision.nodesToDown should ===(decision1.nodesToDown ++ decision2.nodesToDown)

          case Decision.DownIndirectlyConnected(nodesToDown) =>
            strategyDecision.nodesToDown should ===(nodesToDown.map(identity[Node]))

          case Decision.DownAll(nodesToDown) =>
            strategyDecision.nodesToDown should ===(nodesToDown.toSortedSet)

          case Decision.Idle => strategyDecision.nodesToDown.isEmpty shouldBe true
        }
      }
    }

    "correctly combine decisions" in {
      forAll { decisions: List[Decision] =>
        val expectedNodesToDown: SortedSet[Node] =
          SortedSet(decisions.flatMap(_.nodesToDown): _*)
        val combined: SortedSet[Node] =
          decisions.foldRight(Monoid[Decision].empty)(Monoid[Decision].combine).nodesToDown
        combined should contain theSameElementsAs expectedNodesToDown
      }
    }
  }
} 
Example 20
Source File: Checker.scala    From spark-bam   with Apache License 2.0
package org.hammerlab.bam.check.indexed

import hammerlab.iterator._
import org.hammerlab.bam.check
import org.hammerlab.bam.check.Checker.MakeChecker
import org.hammerlab.bam.check.{ MaxReadSize, ReadStartFinder }
import org.hammerlab.bgzf.Pos
import org.hammerlab.channel.{ CachingChannel, SeekableByteChannel }

import scala.collection.immutable.SortedSet

case class Checker(readPositions: SortedSet[Pos])
  extends check.Checker[Boolean]
    with ReadStartFinder {

  override def apply(pos: Pos): Boolean =
    readPositions(pos)

  override def nextReadStart(start: Pos)(
      implicit
      maxReadSize: MaxReadSize
  ): Option[Pos] =
    readPositions
      .iteratorFrom(start)
      .buffered
      .headOption
}

object Checker {
  implicit def makeChecker(implicit records: SortedSet[Pos]): MakeChecker[Boolean, Checker] =
    new MakeChecker[Boolean, Checker] {
      override def apply(ch: CachingChannel[SeekableByteChannel]): Checker =
        Checker(records)
    }
} 
Example 21
Source File: BlocksAndIndexedRecords.scala    From spark-bam   with Apache License 2.0
package org.hammerlab.bam.check.indexed

import hammerlab.path._
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.hammerlab.args.ByteRanges
import org.hammerlab.bam.check.Blocks
import org.hammerlab.bgzf.Pos
import org.hammerlab.bgzf.block.Metadata
import org.hammerlab.kryo.Registrar

import scala.collection.immutable.SortedSet
import scala.reflect.ClassTag

case class BlocksAndIndexedRecords(blocks: RDD[Metadata],
                                   records: RDD[SortedSet[Pos]])

object BlocksAndIndexedRecords
  extends Registrar {

  def apply[U: ClassTag]()(
      implicit
      path: Path,
      sc: SparkContext,
      rangesBroadcast: Broadcast[Option[ByteRanges]],
      blockArgs: Blocks.Args,
      recordArgs: IndexedRecordPositions.Args
  ): BlocksAndIndexedRecords = {

    val Blocks(blocks, bounds) = Blocks()

    val posBounds =
      bounds
        .copy(
          partitions =
            bounds
              .partitions
              .map {
                _.map {
                  case (start, endOpt) ⇒
                    (
                      Pos(start, 0),
                      endOpt.map(Pos(_, 0))
                    )
                }
              }
        )

    val indexedRecords = IndexedRecordPositions(recordArgs.path)

    val repartitionedRecords = indexedRecords.toSets(posBounds)

    BlocksAndIndexedRecords(
      blocks,
      repartitionedRecords
    )
  }

  register(
    Blocks
  )
} 
Example 22
Source File: IndexedRecordPositions.scala    From spark-bam   with Apache License 2.0
package org.hammerlab.bam.check.indexed

import caseapp.{ ValueDescription, HelpMessage ⇒ M, Name ⇒ O }
import hammerlab.path._
import magic_rdds.ordered._
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.hammerlab.args.ByteRanges
import org.hammerlab.bgzf.Pos
import org.hammerlab.magic.rdd.ordered.SortedRDD
import org.hammerlab.magic.rdd.ordered.SortedRDD.{ Bounds, bounds }

import scala.collection.immutable.SortedSet


  def apply(path: Path)(
      implicit
      sc: SparkContext,
      rangesBroadcast: Broadcast[Option[ByteRanges]]
  ): IndexedRecordPositions = {
    val reads =
      sc
        .textFile(path.toString)
        .map(
          line ⇒
            line.split(",") match {
              case Array(a, b) ⇒
                Pos(a.toLong, b.toInt)
              case _ ⇒
                throw new IllegalArgumentException(
                  s"Bad record-pos line: $line"
                )
            }
        )
        .filter {
          case Pos(blockPos, _) ⇒
            rangesBroadcast
            .value
            .forall(_.contains(blockPos))
        }
        .cache

    IndexedRecordPositions(
      reads,
      bounds(reads)
    )
  }
} 
Example 23
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0
package scan

import java.io.PrintWriter
import java.nio.file._

import org.specs2._

import scala.collection.immutable.SortedSet

class ScannerSpec extends mutable.Specification {

  "Report Format" ! {
    val base = deletedOnExit(Files.createTempDirectory("exerciseClassic"))
    val base1 = deletedOnExit(fillFile(base, 1))
    val base2 = deletedOnExit(fillFile(base, 2))
    val subdir = deletedOnExit(Files.createTempDirectory(base, "subdir"))
    val sub1 = deletedOnExit(fillFile(subdir, 1))
    val sub3 = deletedOnExit(fillFile(subdir, 3))

    val actual = Scanner.pathScan(FilePath(base), 2)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }

  def fillFile(dir: Path, size: Int) = {
    val path = dir.resolve(s"$size.txt")
    val w = new PrintWriter(path.toFile)
    try w.write("a" * size)
    finally w.close
    path
  }

  def deletedOnExit(p: Path) = {
    p.toFile.deleteOnExit()
    p
  }

} 
Example 24
Source File: PhaseFlavour.scala    From Converter   with GNU General Public License v3.0
package org.scalablytyped.converter.internal.importer

import com.olvind.logging.Logger
import org.scalablytyped.converter.internal.phases.{GetDeps, IsCircular, Phase, PhaseRes}
import org.scalablytyped.converter.internal.scalajs.TreeScope
import org.scalablytyped.converter.internal.scalajs.flavours.FlavourImpl

import scala.collection.immutable.SortedSet

class PhaseFlavour(flavour: FlavourImpl) extends Phase[Source, LibScalaJs, LibScalaJs] {

  override def apply(
      source:     Source,
      lib:        LibScalaJs,
      getDeps:    GetDeps[Source, LibScalaJs],
      isCircular: IsCircular,
      _logger:    Logger[Unit],
  ): PhaseRes[Source, LibScalaJs] = {
    val logger = _logger.withContext("flavour", flavour.toString)

    getDeps((lib.dependencies.keys: Iterable[Source]).to[SortedSet]).map {
      case LibScalaJs.Unpack(deps) =>
        val originalScope = new TreeScope.Root(
          libName       = lib.scalaName,
          _dependencies = lib.dependencies.map { case (_, lib) => lib.scalaName -> lib.packageTree },
          logger        = logger,
          pedantic      = false,
          outputPkg     = flavour.outputPkg,
        )

        val tree = flavour.rewrittenTree(originalScope, lib.packageTree)

        LibScalaJs(lib.source)(
          lib.libName,
          lib.scalaName,
          lib.libVersion,
          tree,
          deps,
          lib.isStdLib,
          lib.names,
        )
    }
  }
} 
Example 25
Source File: Phase4Publish.scala    From Converter   with GNU General Public License v3.0
package org.scalablytyped.converter.internal
package importer

import com.olvind.logging.Logger
import org.scalablytyped.converter.internal.importer.build._
import org.scalablytyped.converter.internal.phases.{GetDeps, IsCircular, Phase, PhaseRes}

import scala.collection.immutable.SortedSet
import scala.concurrent.Await
import scala.concurrent.duration.Duration


      val alreadyWrittenMavenFiles: MavenLayout[os.RelPath, os.Path] =
        MavenLayout(
          lib.project,
          lib.localIvyFiles.jarFile._1,
          lib.localIvyFiles.sourceFile._1,
          lib.localIvyFiles.pomFile._1,
        )

      val published: Unit =
        Await.result(publisher.publish(lib.project, alreadyWrittenMavenFiles), Duration.Inf)
      PhaseRes.Ok(PublishedSbtProject(lib.project)(lib.classfileDir, lib.localIvyFiles, Some(published)))
    }
} 
Example 26
Source File: CreateSizeList.scala    From abc   with Apache License 2.0
package com.rklaehn.abc

import org.github.jamm.MemoryMeter
import cats.kernel.instances.all._
import scala.collection.immutable.{HashMap, HashSet, SortedMap, SortedSet}

object CreateSizeList extends App {

  lazy val mm = new MemoryMeter()
  lazy val overhead = mm.measure(new java.lang.Object)
  lazy val pointerSize = (mm.measure(new Array[java.lang.Object](256)) - mm.measure(new Array[java.lang.Object](128))) / 128

  println("|n|ArraySet|HashSet|SortedSet|")
  println("|--:|--:|--:|--:|")
  for(n <- Seq(1, 10, 100, 1000, 10000, 100000)) {
    val xs = Array(1 to n: _*)
    val as = ArraySet[Int](xs: _*)
    val hs = HashSet[Int](xs: _*)
    val ss = SortedSet[Int](xs: _*)
    val ass = mm.measureDeep(as)
    val hss = mm.measureDeep(hs)
    val sss = mm.measureDeep(ss)
    println(s"| $n| $ass| $hss| $sss|")
  }

  println()
  println("|n|ArrayMap|HashMap|SortedMap|")
  println("|--:|--:|--:|--:|")
  for(n <- Seq(1, 10, 100, 1000, 10000, 100000)) {
    val xs = Array(1 to n: _*)
    val entries = xs.map(x => x -> x)
    val as = ArrayMap(entries: _*)
    val hs = HashMap(entries: _*)   // build the map types named in the table header above
    val ss = SortedMap(entries: _*)
    val ass = mm.measureDeep(as)
    val hss = mm.measureDeep(hs)
    val sss = mm.measureDeep(ss)
    println(s"| $n| $ass| $hss| $sss|")
  }

  println()
  println("|n|ArraySeq|Vector|List|")
  println("|--:|--:|--:|--:|")
  for(n <- Seq(1, 10, 100, 1000, 10000, 100000)) {
    val xs = Array(1 to n: _*)
    val as = ArraySeq(xs: _*)
    val hs = Vector(xs: _*)
    val ss = List(xs: _*)
    val ass = mm.measureDeep(as)
    val hss = mm.measureDeep(hs)
    val sss = mm.measureDeep(ss)
    println(s"| $n| $ass| $hss| $sss|")
  }
} 
Example 27
Source File: SetCreateBench.scala    From abc   with Apache License 2.0
package com.rklaehn.abc

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import cats.kernel.instances.all._

import scala.collection.immutable.{HashSet, SortedSet}

sealed trait SetCreateBenchOps {
  def createBulk: Any
  def createElements: Any
}

object SetCreateBenchOps extends BenchUtil {

  def apply(a: Seq[Int], kind: String) = {
    val a1 = a.map(mix).toArray
    require(a1.length == a.length)
    kind match {
      case "hashset" => ScalaCollectionBench(a1, x => HashSet.apply(x: _*))
      case "sortedset" => ScalaCollectionBench(a1, x => SortedSet.apply(x: _*))
      case "arrayset" => TypeClassBench(a1)
      case "arrayset2" => ???
    }
  }

  private final case class ScalaCollectionBench(a: Array[Int], f: Array[Int] => Any) extends SetCreateBenchOps {
    override def createBulk: Any = f(a)
    override def createElements: Any = f(a)
  }

  private final case class TypeClassBench(a: Array[Int]) extends SetCreateBenchOps {
    override def createBulk: Any = {
      ArraySet(a: _*)
    }
    override def createElements: Any = {
      a.foldLeft(ArraySet.empty[Int])(_ + _)
    }
  }
}

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class SetCreateBench {

  @Param(Array("1", "10", "100", "1000", "10000", "100000"))
  var size = 0

  @Param(Array("arrayset", "hashset", "sortedset")) //, "arrayset2"))
  var kind = ""

  var bench: SetCreateBenchOps = _

  @Setup
  def setup(): Unit = {
    bench = SetCreateBenchOps(0 until size, kind)
  }

  @Benchmark
  def createBulk(x: Blackhole): Unit = x.consume(bench.createBulk)

  @Benchmark
  def createElements(x: Blackhole): Unit = x.consume(bench.createElements)
} 
Example 28
Source File: SetElementBench.scala    From abc   with Apache License 2.0
package com.rklaehn.abc

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import cats.kernel.instances.all._
import ScalaCollectionConverters._

import scala.collection.immutable.{HashSet, SortedSet}

sealed trait SetElementBenchOps {
  def containsTrue: Any
  def containsFalse: Any
}

object SetElementBenchOps extends BenchUtil {

  def apply(a: Seq[Int], c: Int, n: Int, kind: String) = {
    val a1 = a.map(mix)
    val c1 = mix(c)
    val n1 = mix(n)
    require(a1.length == a.length)
    kind match {
      case "hashset" => ScalaCollectionBench(HashSet(a1: _*), c1, n1)
      case "sortedset" => ScalaCollectionBench(SortedSet(a1: _*), c1, n1)
      case "arrayset" => TypeClassBench(ArraySet(a1: _*), c1, n1)
      case "arrayset2" => ScalaCollectionBench(ArraySet(a1: _*).asCollection, c1, n1)
    }
  }

  private final case class ScalaCollectionBench(a: Set[Int], c: Int, n: Int) extends SetElementBenchOps {
    override def containsTrue: Boolean = a.contains(c)
    override def containsFalse: Boolean = a.contains(n)
  }

  private final case class TypeClassBench(a: ArraySet[Int], c: Int, n: Int) extends SetElementBenchOps {
    override def containsTrue: Boolean = a.contains(c)
    override def containsFalse: Boolean = a.contains(n)
  }
}

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class SetElementBench {

  @Param(Array("1", "10", "100", "1000", "10000", "100000"))
  var size = 0

  @Param(Array("arrayset", "hashset", "sortedset")) //, "arrayset2"))
  var kind = ""

  var k: Int = 0
  var bench: SetElementBenchOps = _

  @Setup
  def setup(): Unit = {
    val c = (0.3 * size).toInt // a value that is contained in the set
    val n = (1.3 * size).toInt // a value that is not contained in the set
    bench = SetElementBenchOps(0 until size, c, n, kind)
  }

  @Benchmark
  def containsFalse(x: Blackhole): Unit = x.consume(bench.containsFalse)

  @Benchmark
  def containsTrue(x: Blackhole): Unit = x.consume(bench.containsTrue)
} 
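The membership checks above boil down to SortedSet.contains, probed with one value known to be in the set and one known to be absent. A minimal standard-library sketch of that pattern (the object name and sizes are illustrative):

import scala.collection.immutable.SortedSet

object SortedSetContainsSketch extends App {
  val size = 100
  val s = SortedSet((0 until size): _*)

  val c = (0.3 * size).toInt // a value that is contained in the set
  val n = (1.3 * size).toInt // a value that is not contained in the set

  println(s.contains(c)) // true
  println(s.contains(n)) // false
}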
Example 29
Source File: SetSetBench.scala    From abc   with Apache License 2.0 5 votes vote down vote up
package com.rklaehn.abc

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import cats.kernel.instances.all._
import ScalaCollectionConverters._

import scala.collection.immutable.{HashSet, SortedSet}

sealed trait SetSetBenchOps {
  def union: Any
  def intersect: Any
  def diff: Any
  def subsetOf: Boolean
  def filter(f: Int => Boolean): Any
}

object SetSetBenchOps extends BenchUtil {

  def apply(a: Seq[Int], b: Seq[Int], kind: String) = {
    val a1 = a.map(mix)
    val b1 = b.map(mix)
    kind match {
      case "hashset" => ScalaCollectionBench(HashSet(a1: _*), HashSet(b1: _*))
      case "sortedset" => ScalaCollectionBench(SortedSet(a1: _*), SortedSet(b1: _*))
      case "arrayset" => TypeClassBench(ArraySet(a1: _*), ArraySet(b1: _*))
      case "arrayset2" => ScalaCollectionBench(ArraySet(a1: _*).asCollection, ArraySet(b1: _*).asCollection)
    }
  }

  private final case class ScalaCollectionBench(a: Set[Int], b: Set[Int]) extends SetSetBenchOps {
    override def union: Any = a union b
    override def diff: Any = a diff b
    override def subsetOf: Boolean = a subsetOf b
    override def intersect: Any = a intersect b
    override def filter(f: (Int) => Boolean): Any = a filter f
  }

  private final case class TypeClassBench(a: ArraySet[Int], b: ArraySet[Int]) extends SetSetBenchOps {
    override def union: Any = a union b
    override def diff: Any = a diff b
    override def subsetOf: Boolean = a subsetOf b
    override def intersect: Any = a intersect b
    override def filter(f: (Int) => Boolean): Any = a filter f
  }
}

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class SetSetBench {

  @Param(Array("1", "10", "100", "1000", "10000", "100000"))
  var size = 0

  @Param(Array("0.0", "0.5", "1.0"))
//  @Param(Array("0.5"))
  var offset = 0.0

  @Param(Array("arrayset", "hashset", "sortedset")) //, "arrayset2"))
  var kind = ""

  var k: Int = 0
  var bench: SetSetBenchOps = _

  val shift = 1000000 // so we don't get the cached java.lang.Integer instances

  @Setup
  def setup(): Unit = {
    k = (offset * size).toInt
    bench = SetSetBenchOps(shift until (shift + size), (shift + k) until (shift + k + size), kind)
  }


  @Benchmark
  def union(x: Blackhole): Unit = {
    x.consume(bench.union)
  }

  @Benchmark
  def intersect(x: Blackhole): Unit = {
    x.consume(bench.intersect)
  }

  @Benchmark
  def diff(x: Blackhole): Unit = {
    x.consume(bench.diff)
  }

  @Benchmark
  def subsetOf(x: Blackhole): Unit = {
    x.consume(bench.subsetOf)
  }

  @Benchmark
  def filter(x: Blackhole): Unit = {
    x.consume(bench.filter(_ < k + shift))
  }
} 
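For readers unfamiliar with the operations being measured, this is how union, intersect, diff, subsetOf and filter behave on plain SortedSets (a standalone sketch with illustrative values):

import scala.collection.immutable.SortedSet

object SortedSetOpsSketch extends App {
  val a = SortedSet(1, 2, 3, 4)
  val b = SortedSet(3, 4, 5, 6)

  println(a union b)        // TreeSet(1, 2, 3, 4, 5, 6)
  println(a intersect b)    // TreeSet(3, 4)
  println(a diff b)         // TreeSet(1, 2)
  println(a subsetOf b)     // false
  println(a filter (_ < 3)) // TreeSet(1, 2)
}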
Example 30
Source File: SetCreateAccessBench.scala    From abc   with Apache License 2.0 5 votes vote down vote up
package com.rklaehn.abc

import cats.kernel.instances.all._
import ichi.bench.Thyme
import ichi.bench.Thyme.HowWarm

import scala.collection.immutable.{HashSet, SortedSet}

object SetCreateAccessBench extends App {

  val th = Thyme.warmed(verbose = println, warmth = HowWarm.BenchOff)

  val ns = Array(1, 10, 100, 1000, 10000, 100000)

  def createInt(): Unit = {
    for (n ← ns) {
      val elements = (0 until n).toArray
      def s0 = HashSet(elements:_*)
      def s1 = SortedSet(elements:_*)
      def s2 = ArraySet(elements:_*)
      th.pbenchOffWarm(s"Create HashSet[Int] vs ArraySet[Int] $n")(
        th.Warm(s0.asInstanceOf[AnyRef]))(
        th.Warm(s2.asInstanceOf[AnyRef]))
      th.pbenchOffWarm(s"Create SortedSet[Int] vs ArraySet[Int] $n")(
        th.Warm(s1.asInstanceOf[AnyRef]))(
        th.Warm(s2.asInstanceOf[AnyRef]))
    }
  }

  def accessInt(): Unit = {
    for (n ← ns) {
      val elements = (0 until n).toArray
      val s0 = HashSet(elements:_*)
      val s1 = SortedSet(elements:_*)
      val s2 = ArraySet(elements:_*)
      val x = 0
      th.pbenchOffWarm(s"Access HashSet[Int] vs ArraySet[Int] $n")(
        th.Warm(s0(x)))(
        th.Warm(s2(x)))
      th.pbenchOffWarm(s"Access SortedSet[Int] vs ArraySet[Int] $n")(
        th.Warm(s1(x)))(
        th.Warm(s2(x)))
    }
  }

  createInt()
  accessInt()
} 
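The access benchmark calls the sets with apply syntax, s0(x); on Scala sets this is just a membership test, so a set can also be passed directly where a predicate is expected. A small sketch (illustrative values):

import scala.collection.immutable.SortedSet

object SortedSetApplySketch extends App {
  val s = SortedSet(1, 2, 3)

  // s(x) is equivalent to s.contains(x).
  println(s(2)) // true
  println(s(9)) // false

  // A Set[A] extends A => Boolean, so it can serve as a filter predicate.
  println(List(0, 1, 2, 3, 4).filter(s)) // List(1, 2, 3)
}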
Example 31
Source File: TestFactories.scala    From maha   with Apache License 2.0 5 votes vote down vote up
package com.yahoo.maha.service.factory

import com.yahoo.maha.core.{DimCostEstimator, DimensionCandidate, FactCostEstimator, Filter, RowsEstimate}
import com.yahoo.maha.core.request.ReportingRequest
import com.yahoo.maha.service.MahaServiceConfig.MahaConfigResult
import com.yahoo.maha.service.MahaServiceConfigContext

import org.json4s.JValue

import scala.collection.immutable.SortedSet
import scala.collection.mutable


class TestFactEstimator extends FactCostEstimator {
  override def isGrainKey(grainKey: String): Boolean = true

  def getRowsEstimate(schemaRequiredEntitySet:Set[(String, Filter)]
                      , dimensionsCandidates: SortedSet[DimensionCandidate]
                      , factDimList: List[String]
                      , request: ReportingRequest
                      , filters: scala.collection.mutable.Map[String, Filter]
                      , defaultRowCount:Long): RowsEstimate = {
    if (request.isDebugEnabled) {
      RowsEstimate(10000, true, Long.MaxValue, false)
    } else RowsEstimate(1000, true, Long.MaxValue, false)
  }

  override def getSchemaBasedGrainRows(grainKey: String, request: ReportingRequest, filters: mutable.Map[String, Filter], defaultRowCount: Long): Option[Long] = Option(1000)
  override def getAllBasedGrainRows(grainKey: String, request: ReportingRequest, filters: mutable.Map[String, Filter]): Option[Long] = Option(1000)
}

class TestDimEstimator extends DimCostEstimator {
  override def getCardinalityEstimate(grainKey: String, request: ReportingRequest, filters: mutable.Map[String, Filter]): Option[Long] = {
    if(request.isDebugEnabled) {
      Some(10000)
    } else Some(1000)
  }
}

import _root_.scalaz._
import Scalaz._


class TestFactCostEstimatoryFactory extends FactCostEstimatorFactory {
  override def fromJson(config: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[FactCostEstimator] = new TestFactEstimator().successNel

  override def supportedProperties: List[(String, Boolean)] = List.empty
}

class TestDimCostEstimatoryFactory extends DimCostEstimatorFactory {
  override def fromJson(config: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[DimCostEstimator] = new TestDimEstimator().successNel

  override def supportedProperties: List[(String, Boolean)] = List.empty
} 
Example 32
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.PrintWriter
import java.nio.file._

import org.specs2._

import scala.collection.immutable.SortedSet

class ScannerSpec extends mutable.Specification {

  "Report Format" ! {
    val base = deletedOnExit(Files.createTempDirectory("exerciseClassic"))
    val base1 = deletedOnExit(fillFile(base, 1))
    val base2 = deletedOnExit(fillFile(base, 2))
    val subdir = deletedOnExit(Files.createTempDirectory(base, "subdir"))
    val sub1 = deletedOnExit(fillFile(subdir, 1))
    val sub3 = deletedOnExit(fillFile(subdir, 3))

    val actual = Scanner.pathScan(FilePath(base), 2)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }

  def fillFile(dir: Path, size: Int) = {
    val path = dir.resolve(s"$size.txt")
    val w = new PrintWriter(path.toFile)
    try w.write("a" * size)
    finally w.close
    path
  }

  def deletedOnExit(p: Path) = {
    p.toFile.deleteOnExit()
    p
  }

} 
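The PathScan expectations in these specs hold the largest files in a SortedSet, i.e. a sorted top-N. A minimal standard-library sketch of that idea, using an explicit descending Ordering and making no assumptions about the project's FileSize type (the object name and sizes are illustrative):

import scala.collection.immutable.SortedSet

object TopNSketch extends App {
  val sizes = List(1L, 4L, 2L, 3L)

  // Keep sizes sorted largest-first, then retain only the top N entries.
  val descending: SortedSet[Long] = SortedSet.empty[Long](Ordering.Long.reverse) ++ sizes
  println(descending.take(2)) // TreeSet(4, 3)
}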
Example 33
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, FilesystemCmd, Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T]) =
    program.runReader(ScanConfig(2)).runFilesystemCmds(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base))

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    expectedLogs.forall(logs.contains)
  }
} 
Example 34
Source File: sets.scala    From Converter   with GNU General Public License v3.0 5 votes vote down vote up
package org.scalablytyped.converter.internal
import scala.collection.immutable.SortedSet

object sets {
  object EmptySet {
    def unapply[T](ts: Set[T]): Boolean = ts.isEmpty
  }

  object NonEmptySet {
    def unapply[T](ts: Set[T]): Option[Set[T]] = if (ts.nonEmpty) Some(ts) else None
  }

  @inline final implicit class SetOps[T](private val ts: Set[T]) extends AnyVal {
    @inline def sorted(implicit O: Ordering[T]): SortedSet[T] =
      SortedSet.empty[T] ++ ts
  }
} 
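A hypothetical usage sketch of the helpers above, assuming the sets object is on the classpath (the object name SetsUsageSketch and the values are illustrative):

import scala.collection.immutable.SortedSet
import org.scalablytyped.converter.internal.sets._

object SetsUsageSketch extends App {
  val unordered: Set[String] = Set("b", "c", "a")

  // SetOps adds .sorted, turning any Set into a SortedSet given an Ordering.
  val ordered: SortedSet[String] = unordered.sorted
  println(ordered) // TreeSet(a, b, c)

  // EmptySet / NonEmptySet are extractors for matching on emptiness.
  unordered match {
    case EmptySet()      => println("empty")
    case NonEmptySet(ts) => println(s"non-empty: $ts")
  }
}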
Example 35
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, FilesystemCmd, Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T]) =
    program.runReader(ScanConfig(2)).runFilesystemCmds(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base))

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    expectedLogs.forall(logs.contains)
  }
} 
Example 36
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.PrintWriter
import java.nio.file._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  "Report Format" ! {
    val base = deletedOnExit(Files.createTempDirectory("exerciseTask"))
    val base1 = deletedOnExit(fillFile(base, 1))
    val base2 = deletedOnExit(fillFile(base, 2))
    val subdir = deletedOnExit(Files.createTempDirectory(base, "subdir"))
    val sub1 = deletedOnExit(fillFile(subdir, 1))
    val sub3 = deletedOnExit(fillFile(subdir, 3))

    val actual = Scanner.pathScan(FilePath(base), 2).runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }

  def fillFile(dir: Path, size: Int) = {
    val path = dir.resolve(s"$size.txt")
    val w = new PrintWriter(path.toFile)
    try w.write("a" * size)
    finally w.close
    path
  }

  def deletedOnExit(p: Path) = {
    p.toFile.deleteOnExit()
    p
  }

} 
Example 37
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io._
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._

import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._

import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  import EffOptics._

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  "file scan" ! {
    val base = Directory("base")
    val base1 = File(s"${base.path}/1.txt")
    val base2 = File(s"${base.path}/2.txt")
    val subdir = Directory(s"${base.path}/subdir")
    val sub1 = File(s"${subdir.path}/1.txt")
    val sub3 = File(s"${subdir.path}/3.txt")
    val fs: Filesystem = MockFilesystem(
      Map(
        base -> List(subdir, base1, base2),
        subdir -> List(sub1, sub3)
      ),
      Map(base1 -> 1, base2 -> 2, sub1 -> 1, sub3 -> 3)
    )

    val program = Scanner.pathScan[Scanner.R](base)
    val actual = program.runReader(AppConfig(ScanConfig(2), fs)).runAsync.runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }
} 
Example 38
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io._
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._

import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._

import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._

import org.specs2._

import scala.collection.immutable.SortedSet

import scala.concurrent.duration._

import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  "file scan" ! {
    val base = Directory("base")
    val base1 = File(s"${base.path}/1.txt")
    val base2 = File(s"${base.path}/2.txt")
    val subdir = Directory(s"${base.path}/subdir")
    val sub1 = File(s"${subdir.path}/1.txt")
    val sub3 = File(s"${subdir.path}/3.txt")
    val fs: Filesystem = MockFilesystem(
      Map(
        base -> List(subdir, base1, base2),
        subdir -> List(sub1, sub3)
      ),
      Map(base1 -> 1, base2 -> 2, sub1 -> 1, sub3 -> 3)
    )

    val program = Scanner.pathScan[Scanner.R](base)
    val actual = program.runReader(ScanConfig(2)).runReader(fs).runAsync.runSyncUnsafe(3.seconds)
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }
} 
Example 39
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    expectedLogs.forall(logs.contains)
  }
} 
Example 40
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.PrintWriter
import java.nio.file._

import org.specs2._

import scala.collection.immutable.SortedSet

import cats.effect._

import org.atnos.eff.addon.cats.effect.IOEffect._
import org.atnos.eff.syntax.addon.cats.effect._

class ScannerSpec extends mutable.Specification {

  "Report Format" ! {
    val base = deletedOnExit(Files.createTempDirectory("exercise1"))
    val base1 = deletedOnExit(fillFile(base, 1))
    val base2 = deletedOnExit(fillFile(base, 2))
    val subdir = deletedOnExit(Files.createTempDirectory(base, "subdir"))
    val sub1 = deletedOnExit(fillFile(subdir, 1))
    val sub3 = deletedOnExit(fillFile(subdir, 3))

    val scanProgram = Scanner.pathScan(base, 2)
    val actual = scanProgram.unsafeRunSync
    val expected = new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)

    actual.mustEqual(expected)
  }

  def fillFile(dir: Path, size: Int) = {
    val path = dir.resolve(s"$size.txt")
    val w = new PrintWriter(path.toFile)
    try w.write("a" * size)
    finally w.close
    path
  }

  def deletedOnExit(p: Path) = {
    p.toFile.deleteOnExit()
    p
  }

} 
Example 41
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val linkTarget = File(s"/somewhere/else/7.txt")
  val base1 = File(s"${base.path}/1.txt")
  val baseLink = Symlink(s"${base.path}/7.txt", linkTarget)
  val subdir = Directory(s"${base.path}/subdir")
  val sub2 = File(s"${subdir.path}/2.txt")
  val subLink = Symlink(s"${subdir.path}/7.txt", linkTarget)
  val directories = Map(
    base -> List(subdir, base1, baseLink),
    subdir -> List(sub2, subLink)
  )
  val fileSizes = Map(base1 -> 1L, sub2 -> 2L, linkTarget -> 7L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx5[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?], State[Set[FilePath], ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).evalStateZero[Set[FilePath]].taskAttempt.runWriter[Log].runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(linkTarget, 7), FileSize(sub2, 2)), 10, 3))

  val (actual, logs) = run(Scanner.pathScan[R](base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

} 
Example 42
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    expectedLogs.forall(logs.contains)
  }
} 
Example 43
Source File: ScannerSpec.scala    From GettingWorkDoneWithExtensibleEffects   with Apache License 2.0 5 votes vote down vote up
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx3[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).runAsync.attempt.runSyncUnsafe(3.seconds)

  "file scan" ! {
    val actual = run(Scanner.pathScan(base), fs)
    val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))

    actual.mustEqual(expected)
  }

  "Error from Filesystem" ! {
    val emptyFs: Filesystem = MockFilesystem(directories, Map.empty)

    val actual = runE(Scanner.scanReport(Array("base", "10")), emptyFs)
    val expected =  Left(new IOException().toString)

    actual.mustEqual(expected)
  }

  type E = Fx.fx3[Task, Reader[Filesystem, ?], Either[String, ?]]
  def runE[T](program: Eff[E, T], fs: Filesystem) =
    //there are two nested Either in the stack, one from Exceptions and one from errors raised by the program
    //we convert to a common error type String then flatten
    program.runReader(fs).runEither.runAsync.attempt.runSyncUnsafe(3.seconds).leftMap(_.toString).flatten

  "Error - Report with non-numeric input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "not a number")), fs)
    val expected = Left("Number of files must be numeric: not a number")

    actual.mustEqual(expected)
  }

  "Error - Report with non-positive input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "-1")), fs)
    val expected = Left("Invalid number of files -1")

    actual.mustEqual(expected)
  }
}