java.io.FileInputStream Scala Examples

The following examples show how to use java.io.FileInputStream. They are taken from open-source Scala projects; the source file, project, and license are noted above each example.
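Before the project examples, here is a minimal, self-contained sketch (not drawn from any of the projects below) of the basic pattern most of them share: open a FileInputStream, read it in chunks, and close it in a finally block.

import java.io.{ByteArrayOutputStream, File, FileInputStream}

object FileInputStreamSketch {
  // Reads an entire file into a byte array, always closing the stream.
  def readAllBytes(file: File): Array[Byte] = {
    val in = new FileInputStream(file)
    try {
      val out = new ByteArrayOutputStream()
      val buffer = new Array[Byte](4096)
      var read = in.read(buffer)
      while (read != -1) {
        out.write(buffer, 0, read)
        read = in.read(buffer)
      }
      out.toByteArray
    } finally {
      in.close()
    }
  }
}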
Example 1
Source File: KeyUtils.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.jwt

import java.io.{File, FileInputStream}
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.security.cert.CertificateFactory
import java.security.interfaces.{ECPublicKey, RSAPrivateKey, RSAPublicKey}
import java.security.spec.PKCS8EncodedKeySpec
import java.security.KeyFactory

import com.daml.lf.data.TryOps.Bracket.bracket
import scalaz.Show
import scalaz.syntax.show._

import scala.util.Try

object KeyUtils {
  final case class Error(what: Symbol, message: String)

  object Error {
    implicit val showInstance: Show[Error] =
      Show.shows(e => s"KeyUtils.Error: ${e.what}, ${e.message}")
  }

  private val mimeCharSet = StandardCharsets.ISO_8859_1

  
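  // Generates a JWKS (JSON Web Key Set) JSON document from a map of key ID to RSA public key,
  // Base64url-encoding each key's public exponent ("e") and modulus ("n").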
  def generateJwks(keys: Map[String, RSAPublicKey]): String = {
    def generateKeyEntry(keyId: String, key: RSAPublicKey): String =
      s"""    {
         |      "kid": "$keyId",
         |      "kty": "RSA",
         |      "alg": "RS256",
         |      "use": "sig",
         |      "e": "${java.util.Base64.getUrlEncoder
           .encodeToString(key.getPublicExponent.toByteArray)}",
         |      "n": "${java.util.Base64.getUrlEncoder.encodeToString(key.getModulus.toByteArray)}"
         |    }""".stripMargin

    s"""
       |{
       |  "keys": [
       |${keys.toList.map { case (keyId, key) => generateKeyEntry(keyId, key) }.mkString(",\n")}
       |  ]
       |}
    """.stripMargin
  }
} 
Example 2
Source File: ClassUtils.scala    From ohara   with Apache License 2.0
package oharastream.ohara.it.code

import java.io.FileInputStream
import java.lang.reflect.Modifier
import java.util.jar.JarInputStream
import java.util.regex.Pattern

import org.junit.Test

import scala.jdk.CollectionConverters._

private[code] object ClassUtils {
  
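  // Lists all classes under oharastream/ohara found on the system classpath; the production-scope
  // variant excludes classes packaged in *tests.jar archives.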
  def classesInProductionScope(): Seq[Class[_]] = allClasses(n => !n.contains("tests.jar"))

  def allClasses(fileNameFilter: String => Boolean): Seq[Class[_]] = {
    val classLoader = ClassLoader.getSystemClassLoader
    val path        = "oharastream/ohara"
    val pattern     = Pattern.compile("^file:(.+\\.jar)!/" + path + "$")
    val urls        = classLoader.getResources(path)
    urls.asScala
      .map(url => pattern.matcher(url.getFile))
      .filter(_.find())
      .map(_.group(1))
      .filter(fileNameFilter)
      .flatMap { f =>
        val jarInput = new JarInputStream(new FileInputStream(f))
        try Iterator
          .continually(jarInput.getNextJarEntry)
          .takeWhile(_ != null)
          .map(_.getName)
          .toArray
          .filter(_.endsWith(".class"))
          .map(_.replace('/', '.'))
          .map(className => className.substring(0, className.length - ".class".length))
          .map(Class.forName)
        finally jarInput.close()
      }
      .toSeq
  }
} 
Example 3
Source File: IntegrityCheckV2.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.participant.state.kvutils.tools

import java.io.{DataInputStream, FileInputStream}

import com.daml.ledger.participant.state.kvutils.export.LedgerDataExporter

object IntegrityCheckV2 {
  def main(args: Array[String]): Unit = {
    if (args.length != 1) {
      println("usage: integrity-check <ledger dump file>")
      println(
        s"You can produce a ledger dump on a kvutils ledger by setting ${LedgerDataExporter.EnvironmentVariableName}=/path/to/file")
      sys.exit(1)
    }

    val filename = args(0)
    println(s"Verifying integrity of $filename...")
    val ledgerDumpStream =
      new DataInputStream(new FileInputStream(filename))
    new IntegrityChecker(LogAppendingCommitStrategySupport).run(ledgerDumpStream)
    sys.exit(0)
  }
} 
Example 4
Source File: ZipBombDetectionSpec.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.archive

import java.io.FileInputStream
import java.util.zip.ZipInputStream

import com.daml.bazeltools.BazelRunfiles
import org.scalatest.{FlatSpec, Matchers, TryValues}

final class ZipBombDetectionSpec extends FlatSpec with Matchers with TryValues {

  private def bomb: ZipInputStream =
    new ZipInputStream(
      new FileInputStream(BazelRunfiles.rlocation("daml-lf/archive/DarReaderTest.dar"))
    )

  "DarReader" should "reject a zip bomb with the proper error" in {
    DarReader()
      .readArchive("t", bomb, entrySizeThreshold = 1024)
      .failure
      .exception shouldBe a[Errors.ZipBomb]
  }

  "UniversalArchiveReader" should "reject a zip bomb with the proper error" in {
    UniversalArchiveReader(entrySizeThreshold = 1024)
      .readDarStream("t", bomb)
      .failure
      .exception shouldBe a[Errors.ZipBomb]
  }

} 
Example 5
Source File: MetricsConfig.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.metrics

import java.io.{FileInputStream, InputStream}
import java.util.Properties

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.util.matching.Regex

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils

private[spark] class MetricsConfig(conf: SparkConf) extends Logging {

  private val DEFAULT_PREFIX = "*"
  private val INSTANCE_REGEX = "^(\\*|[a-zA-Z]+)\\.(.+)".r
  private val DEFAULT_METRICS_CONF_FILENAME = "metrics.properties"

  private[metrics] val properties = new Properties()
  private[metrics] var perInstanceSubProperties: mutable.HashMap[String, Properties] = null

  private def setDefaultProperties(prop: Properties) {
    prop.setProperty("*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet")
    prop.setProperty("*.sink.servlet.path", "/metrics/json")
    prop.setProperty("master.sink.servlet.path", "/metrics/master/json")
    prop.setProperty("applications.sink.servlet.path", "/metrics/applications/json")
  }

  
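  // Loads metrics properties from the given file, or from the default metrics.properties on the
  // classpath when no path is given; load errors are logged and otherwise ignored.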
  private[this] def loadPropertiesFromFile(path: Option[String]): Unit = {
    var is: InputStream = null
    try {
      is = path match {
        case Some(f) => new FileInputStream(f)
        case None => Utils.getSparkClassLoader.getResourceAsStream(DEFAULT_METRICS_CONF_FILENAME)
      }

      if (is != null) {
        properties.load(is)
      }
    } catch {
      case e: Exception =>
        val file = path.getOrElse(DEFAULT_METRICS_CONF_FILENAME)
        logError(s"Error loading configuration file $file", e)
    } finally {
      if (is != null) {
        is.close()
      }
    }
  }

} 
Example 6
Source File: Main.scala    From ros_hadoop   with Apache License 2.0
package de.valtech.foss

import scala.io.Source
import scala.collection.mutable.Map
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConverters._
import Console.{GREEN, RED, RESET}
import scala.language.reflectiveCalls

import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.nio.channels.FileChannel.MapMode._
import java.nio.ByteOrder._
import java.nio.ByteBuffer

import de.valtech.foss.proto.RosbagIdxOuterClass.RosbagIdx

object Main extends App {
  def help() = {
    Console.err.printf(s"""
${RESET}${GREEN}Usage:
	--file <ros.bag> file to process
	--version print Rosbag version and exit
	--offset <offset> --number <records> Seek at offset < 1073741824 and read the specified number of records
${RESET}By default will just create the protobuf idx file needed for configuration.\n\n""")
    sys.exit(0)
  }

  val pargs = Map[String,AnyRef]()
  def process_cli(args: List[String]) :Boolean = args match {
    case Nil => true // parse success
    case "-v" :: rest => pargs += ("version" -> Some(true)); process_cli(rest)
    case "--version" :: rest => pargs += ("version" -> Some(true)); process_cli(rest)
    case "-f" :: x :: rest => pargs += ("file" -> x); process_cli(rest)
    case "--file" :: x :: rest => pargs += ("file" -> x); process_cli(rest)
    case "-n" :: x :: rest => pargs += ("number" -> Some(x.toInt)); process_cli(rest)
    case "--number" :: x :: rest => pargs += ("number" -> Some(x.toInt)); process_cli(rest)
    case "-o" :: x :: rest => pargs += ("offset" -> Some(x.toInt)); process_cli(rest)
    case "--offset" :: x :: rest => pargs += ("offset" -> Some(x.toInt)); process_cli(rest)
    case "-h" :: rest => help(); false
    case "--help" :: rest => help(); false
    case _ => Console.err.printf(s"${RESET}${RED}Unknown argument " + args.head); false
  }
  process_cli(args.toList)

  def use[T <: { def close() }]
    (resource: T)
    (code: T ⇒ Unit) =
    try
      code(resource)
    finally
      resource.close()

  pargs("file") match {
    case f:String => process()
    case _ => help()
  }

  def process(): Unit = {
    val fin = new File(pargs("file").asInstanceOf[String])
    use(new FileInputStream(fin)) { stream => {
      //printf("min: %s\n", Math.min(1073741824, fin.length) )
      val buffer = stream.getChannel.map(READ_ONLY, 0, Math.min(1073741824, fin.length)).order(LITTLE_ENDIAN)
      val p:RosbagParser = new RosbagParser(buffer)
      val version = p.read_version()
      val h = p.read_record().get
      if(pargs contains "version") {
        printf("%s\n%s\n\n", version, h)
        return
      }
      if(pargs contains "number"){
        buffer position pargs.getOrElse("offset",None).asInstanceOf[Option[Int]].getOrElse(0)
        for(i <- List.range(0,pargs("number").asInstanceOf[Option[Int]].getOrElse(0)))
          println(p.read_record)
        return
      }
      val idxpos = h.header.fields("index_pos").asInstanceOf[Long]
      //printf("idxpos: %s %s\n", idxpos, Math.min(1073741824, fin.length) )
      val b = stream.getChannel.map(READ_ONLY, idxpos, Math.min(1073741824, fin.length - idxpos)).order(LITTLE_ENDIAN)
      val pp:RosbagParser = new RosbagParser(b)
      val c = pp.read_connections(h.header, Nil)
      val chunk_idx = pp.read_chunk_infos(c)
      Console.err.printf(s"""${RESET}${GREEN}Found: """
          + chunk_idx.size
          +s""" chunks\n${RESET}It should be the same number reported by rosbag tool.\nIf you encounter any issues try reindexing your file and submit an issue.
          ${RESET}\n""")
      val fout = new FileOutputStream(pargs("file").asInstanceOf[String] + ".idx.bin")
      val builder = RosbagIdx.newBuilder
      for(i <- chunk_idx) builder.addArray(i)
      builder.build().writeTo(fout)
      fout.close()
      //printf("[%s]\n",chunk_idx.toArray.mkString(","))
    }}
  }
} 
Example 7
Source File: SentenceTokenizer.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.dataset.text

import java.io.FileInputStream
import java.net.{URI, URL}

import com.intel.analytics.bigdl.dataset.Transformer

import scala.collection.Iterator
import opennlp.tools.tokenize.{SimpleTokenizer, Tokenizer, TokenizerME, TokenizerModel}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}



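// Transformer that tokenizes each input sentence into an array of words, using OpenNLP's
// SimpleTokenizer by default or a TokenizerME model loaded from the given token file.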
class SentenceTokenizer(tokenFile: Option[String] = None)
  extends Transformer[String, Array[String]] {

  var modelIn: FileInputStream = _
  var model: TokenizerModel = _

  var tokenizer: Tokenizer = _

  def this(tokenFile: URL) {
    this(Some(tokenFile.getPath))
  }

  def close(): Unit = {
    if (modelIn != null) {
      modelIn.close()
    }
  }

  override def apply(prev: Iterator[String]): Iterator[Array[String]] =
    prev.map(x => {
      if (tokenizer == null) {
        if (!tokenFile.isDefined) {
          tokenizer = SimpleTokenizer.INSTANCE
        } else {
          val src: Path = new Path(tokenFile.get)
          val fs = src.getFileSystem(new Configuration())
          val in = fs.open(src)
          model = new TokenizerModel(in)
          tokenizer = new TokenizerME(model)
        }
      }
      val words = tokenizer.tokenize(x)
      words
    })
}

object SentenceTokenizer {
  def apply(tokenFile: Option[String] = None):
    SentenceTokenizer = new SentenceTokenizer(tokenFile)
  def apply(tokenFile: URL):
    SentenceTokenizer = new SentenceTokenizer(tokenFile)
} 
Example 8
Source File: SentenceSplitter.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.dataset.text

import java.io.FileInputStream
import java.net.{URI, URL}

import com.intel.analytics.bigdl.dataset.Transformer
import opennlp.tools.sentdetect.{SentenceDetector, SentenceDetectorME, SentenceModel}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

import scala.collection.Iterator


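// Transformer that splits input text into sentences, using an OpenNLP SentenceDetectorME model
// when a model file is provided and a plain split on '.' otherwise.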
class SentenceSplitter(sentFile: Option[String] = None)
  extends Transformer[String, Array[String]] {

  var modelIn: FileInputStream = _
  var model: SentenceModel = _
  var sentenceDetector: SentenceDetector = _

  def this(sentFileURL: URL) {
    this(Some(sentFileURL.getPath))
  }

  def this(sentFile: String) {
    this(Some(sentFile))
  }

  def close(): Unit = {
    if (modelIn != null) {
      modelIn.close()
    }
  }

  override def apply(prev: Iterator[String]): Iterator[Array[String]] =
    prev.map(x => {
      if (!sentFile.isDefined) {
        x.split('.')
      } else {
        if (sentenceDetector == null) {
          val src: Path = new Path(sentFile.get)
          val fs = src.getFileSystem(new Configuration())
          val in = fs.open(src)

          model = new SentenceModel(in)
          sentenceDetector = new SentenceDetectorME(model)
        }
        sentenceDetector.sentDetect(x)
      }
    })
}

object SentenceSplitter {
  def apply(sentFile: Option[String] = None):
    SentenceSplitter = new SentenceSplitter(sentFile)
  def apply(sentFileURL: URL):
    SentenceSplitter = new SentenceSplitter(sentFileURL)
  def apply(sentFile: String):
  SentenceSplitter = new SentenceSplitter(sentFile)
} 
Example 9
Source File: FixedLengthRecordReader.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.utils.tf

import java.io.{File, FileInputStream}


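// Iterator over fixed-length records in a binary file: skips headerBytes once, then repeatedly
// reads recordBytes and skips hopBytes until the file is exhausted.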
class FixedLengthRecordReader(fileName: File,
                              footerBytes: Int,
                              headerBytes: Int,
                              hopBytes: Int,
                              recordBytes: Int) extends Iterator[Array[Byte]] {

  private val inputStream = new FileInputStream(fileName)

  private var dataBuffer: Array[Byte] = null

  inputStream.skip(headerBytes)


  override def hasNext: Boolean = {
    if (dataBuffer != null) {
      true
    } else {
      dataBuffer = new Array[Byte](recordBytes)
      val numOfBytes = inputStream.read(dataBuffer)
      if (numOfBytes == recordBytes) {
        inputStream.skip(hopBytes)
        true
      } else {
        inputStream.close()
        false
      }
    }
  }

  override def next(): Array[Byte] = {
    if (hasNext) {
      val data = this.dataBuffer
      this.dataBuffer = null
      data
    } else {
      throw new NoSuchElementException("next on empty iterator")
    }
  }
} 
Example 10
Source File: TFRecordIterator.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.utils.tf

import java.io.{BufferedInputStream, File, FileInputStream, InputStream}
import java.nio.{ByteBuffer, ByteOrder}


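// Iterator over records in a TensorFlow TFRecord file: each record is an 8-byte little-endian
// length, a 4-byte length CRC (skipped here), the payload, and a 4-byte data CRC (skipped here).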
class TFRecordIterator(inputStream: InputStream) extends Iterator[Array[Byte]] {

  private var dataBuffer: Array[Byte] = null

  private val lengthBuffer: Array[Byte] = new Array[Byte](8)



  override def hasNext: Boolean = {
    if (dataBuffer != null) {
      true
    } else {
      val numOfBytes = inputStream.read(lengthBuffer)
      if (numOfBytes == 8) {
        val lengthWrapper = ByteBuffer.wrap(lengthBuffer)
        lengthWrapper.order(ByteOrder.LITTLE_ENDIAN)
        val length = lengthWrapper.getLong().toInt
        // todo, do crc check, simply skip now
        inputStream.skip(4)

        dataBuffer = new Array[Byte](length)
        inputStream.read(dataBuffer)
        // todo, do crc check, simply skip now
        inputStream.skip(4)
        true
      } else {
        inputStream.close()
        false
      }
    }
  }

  override def next(): Array[Byte] = {
    if (hasNext) {
      val data = this.dataBuffer
      this.dataBuffer = null
      data
    } else {
      throw new NoSuchElementException("next on empty iterator")
    }
  }
}

object TFRecordIterator {
  def apply(file: File): TFRecordIterator = {
    val inputStream = new FileInputStream(file)
    new TFRecordIterator(inputStream)
  }
} 
Example 11
Source File: COCODatasetSpec.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.dataset.segmentation

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.transform.vision.image.{ImageFeature, RoiImageInfo}
import com.intel.analytics.bigdl.transform.vision.image.label.roi.RoiLabel
import java.awt.image.DataBufferByte
import java.io.{File, FileInputStream}
import javax.imageio.ImageIO
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}

class COCODatasetSpec extends FlatSpec with Matchers with BeforeAndAfter {

  private def processPath(path: String): String = {
    if (path.contains(":")) {
      path.substring(1)
    } else {
      path
    }
  }

  val resourcePath: String = processPath(getClass.getClassLoader.getResource("coco").getPath)
  val dataSet: COCODataset = COCODataset.load(resourcePath
      + File.separator + "cocomini.json", resourcePath)

  "COCODataset" should "correctly be loaded" in {
    dataSet.images.length should be (5)
    dataSet.annotations.length should be (6)
    val cateIdx = Array(53, 53, 53, 1, 19, 1).toIterator
    val sizes = Array((428, 640), (480, 640), (427, 640), (480, 640), (427, 640)).toIterator
    for (anno <- dataSet.annotations) {
      anno.image.id should be (anno.imageId)
      dataSet.categoryId2Idx(anno.categoryId) should be (cateIdx.next())
      anno.categoryIdx should be (dataSet.categoryId2Idx(anno.categoryId))
      if (anno.isCrowd) {
        anno.segmentation.isInstanceOf[COCORLE] should be (true)
      } else {
        anno.segmentation.isInstanceOf[COCOPoly] should be (true)
        val poly = anno.segmentation.asInstanceOf[COCOPoly]
        poly.height should be (anno.image.height)
        poly.width should be (anno.image.width)
      }
    }
    for (img <- dataSet.images) {
      val size = sizes.next()
      img.height should be (size._1)
      img.width should be (size._2)
    }
    for (i <- 1 to dataSet.categories.length) {
      val cate = dataSet.getCategoryByIdx(i)
      dataSet.categoryId2Idx(cate.id) should be (i)
    }
  }

  "COCODataset.toImageFeatures" should "correctly work" in {
    val cateIdx = Array(1, 19, 53, 53, 53, 1).toIterator
    val sizes = Array((428, 640, 3), (480, 640, 3), (427, 640, 3), (480, 640, 3),
      (427, 640, 3)).toIterator
    val uri = Array("COCO_val2014_000000153344.jpg", "COCO_val2014_000000091136.jpg",
      "COCO_val2014_000000558840.jpg", "COCO_val2014_000000200365.jpg",
      "COCO_val2014_000000374530.jpg"
    ).toIterator
    val isCrowd = Array(1f, 1f, 0f, 0f, 0f, 1f).toIterator
    dataSet.toImageFeatures.foreach(imf => {
      imf.getOriginalSize should be (sizes.next())
      val iscr = imf[Tensor[Float]](RoiImageInfo.ISCROWD)

      val roilabel = imf.getLabel[RoiLabel]
      roilabel.classes.size() should be (iscr.size())
      for(i <- 1 to iscr.nElement()) {
        iscr.valueAt(i) should be (isCrowd.next())
        roilabel.classes.valueAt(i) should be (cateIdx.next())
      }
      roilabel.bboxes.size() should be (Array(roilabel.classes.size(1), 4))

      val inputStream = new FileInputStream(resourcePath + File.separator + uri.next())
      val image = ImageIO.read(inputStream)
      val rawdata = image.getRaster.getDataBuffer.asInstanceOf[DataBufferByte].getData()
      require(java.util.Arrays.equals(rawdata, imf[Array[Byte]](ImageFeature.bytes)))
    })
  }

  "COCOImage.toTable" should "correctly work" in {
    val cateIdx = Array(1, 19, 53, 53, 53, 1).toIterator
    val sizes = Array((428, 640, 3), (480, 640, 3), (427, 640, 3), (480, 640, 3),
      (427, 640, 3)).toIterator
    val isCrowd = Array(1f, 1f, 0f, 0f, 0f, 1f).toIterator
    dataSet.images.map(_.toTable).foreach(tab => {
      RoiImageInfo.getOrigSize(tab) should be (sizes.next())
      val iscr = RoiImageInfo.getIsCrowd(tab)
      val classes = RoiImageInfo.getClasses(tab)
      classes.size() should be (iscr.size())
      for(i <- 1 to iscr.nElement()) {
        iscr.valueAt(i) should be (isCrowd.next())
        classes.valueAt(i) should be (cateIdx.next())
      }
      RoiImageInfo.getBBoxes(tab).size() should be (Array(classes.size(1), 4))

    })
  }

} 
Example 12
Source File: JsonRpcHttpsServer.scala    From mantis   with Apache License 2.0
package io.iohk.ethereum.jsonrpc.server

import java.io.{File, FileInputStream}
import java.security.{KeyStore, SecureRandom}
import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}

import akka.actor.ActorSystem
import akka.http.scaladsl.model.headers.HttpOriginRange
import akka.http.scaladsl.{ConnectionContext, Http}
import akka.stream.ActorMaterializer
import io.iohk.ethereum.jsonrpc.JsonRpcController
import io.iohk.ethereum.jsonrpc.server.JsonRpcHttpsServer.HttpsSetupResult
import io.iohk.ethereum.jsonrpc.server.JsonRpcServer.JsonRpcServerConfig
import io.iohk.ethereum.utils.Logger

import scala.concurrent.ExecutionContext.Implicits.global
import scala.io.Source
import scala.util.{Failure, Success, Try}

class JsonRpcHttpsServer(val jsonRpcController: JsonRpcController, config: JsonRpcServerConfig,
                         secureRandom: SecureRandom)(implicit val actorSystem: ActorSystem)
  extends JsonRpcServer with Logger {

  def run(): Unit = {
    implicit val materializer = ActorMaterializer()

    val maybeSslContext = validateCertificateFiles(config.certificateKeyStorePath, config.certificateKeyStoreType, config.certificatePasswordFile).flatMap{
      case (keystorePath, keystoreType, passwordFile) =>
        val passwordReader = Source.fromFile(passwordFile)
        try {
          val password = passwordReader.getLines().mkString
          obtainSSLContext(keystorePath, keystoreType, password)
        } finally {
          passwordReader.close()
        }
    }

    val maybeHttpsContext = maybeSslContext.map(sslContext => ConnectionContext.https(sslContext))

    maybeHttpsContext match {
      case Right(httpsContext) =>
        Http().setDefaultServerHttpContext(httpsContext)
        val bindingResultF = Http().bindAndHandle(route, config.interface, config.port, connectionContext = httpsContext)

        bindingResultF onComplete {
          case Success(serverBinding) => log.info(s"JSON RPC HTTPS server listening on ${serverBinding.localAddress}")
          case Failure(ex) => log.error("Cannot start JSON HTTPS RPC server", ex)
        }
      case Left(error) => log.error(s"Cannot start JSON HTTPS RPC server due to: $error")
    }
  }

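  // The original obtainSSLContext method was dropped from this listing together with its
  // doc comment. The sketch below is an assumption (not the verbatim mantis code) showing
  // the usual pattern: load the keystore from a FileInputStream and build an SSLContext.
  private def obtainSSLContext(keystorePath: String, keystoreType: String, password: String): HttpsSetupResult[SSLContext] = {
    val passwordChars = password.toCharArray
    Try {
      val keyStore = KeyStore.getInstance(keystoreType)
      val keyStoreStream = new FileInputStream(keystorePath)
      try keyStore.load(keyStoreStream, passwordChars)
      finally keyStoreStream.close()

      val keyManagerFactory = KeyManagerFactory.getInstance("SunX509")
      keyManagerFactory.init(keyStore, passwordChars)

      val trustManagerFactory = TrustManagerFactory.getInstance("SunX509")
      trustManagerFactory.init(keyStore)

      val sslContext = SSLContext.getInstance("TLS")
      sslContext.init(keyManagerFactory.getKeyManagers, trustManagerFactory.getTrustManagers, secureRandom)
      sslContext
    } match {
      case Success(sslContext) => Right(sslContext)
      case Failure(ex) => Left(s"Failed to build SSL context: ${ex.getMessage}")
    }
  }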
  
  private def validateCertificateFiles(maybeKeystorePath: Option[String],
                                       maybeKeystoreType: Option[String],
                                       maybePasswordFile: Option[String]): HttpsSetupResult[(String, String, String)] =
    (maybeKeystorePath, maybeKeystoreType, maybePasswordFile) match {
      case (Some(keystorePath), Some(keystoreType), Some(passwordFile)) =>
        val keystoreDirMissing = !new File(keystorePath).isFile
        val passwordFileMissing = !new File(passwordFile).isFile
        if(keystoreDirMissing && passwordFileMissing)
          Left("Certificate keystore path and password file configured but files are missing")
        else if(keystoreDirMissing)
          Left("Certificate keystore path configured but file is missing")
        else if(passwordFileMissing)
          Left("Certificate password file configured but file is missing")
        else
          Right((keystorePath, keystoreType, passwordFile))
      case _ =>
        Left("HTTPS requires: certificate-keystore-path, certificate-keystore-type and certificate-password-file to be configured")
    }

  override def corsAllowedOrigins: HttpOriginRange = config.corsAllowedOrigins
}

object JsonRpcHttpsServer {
  type HttpsSetupResult[T] = Either[String, T]
} 
Example 13
Source File: HdfsFileWriter.scala    From ArchiveSpark   with MIT License
package org.archive.archivespark.sparkling.io

import java.io.{FileInputStream, FileOutputStream, OutputStream}

import org.apache.hadoop.fs.Path
import org.archive.archivespark.sparkling.logging.{Log, LogContext}

import scala.util.Try

class HdfsFileWriter private(filename: String, append: Boolean, replication: Short) extends OutputStream {
  implicit val logContext: LogContext = LogContext(this)

  private val file = IOUtil.tmpFile

  Log.info("Writing to temporary local file " + file.getCanonicalPath + " (" + filename + ")...")

  val out = new FileOutputStream(file)

  override def close(): Unit = {
    Try { out.close() }
    Log.info("Copying from temporary file " + file.getCanonicalPath + " to " + filename + "...")
    if (append) {
      val in = new FileInputStream(file)
      val appendOut = HdfsIO.fs.append(new Path(filename))
      IOUtil.copy(in, appendOut)
      appendOut.close()
      in.close()
      file.delete()
    } else HdfsIO.copyFromLocal(file.getCanonicalPath, filename, move = true, overwrite = true, replication)
    Log.info("Done. (" + filename + ")")
  }

  override def write(b: Int): Unit = out.write(b)
  override def write(b: Array[Byte]): Unit = out.write(b)
  override def write(b: Array[Byte], off: Int, len: Int): Unit = out.write(b, off, len)
  override def flush(): Unit = out.flush()
}

object HdfsFileWriter {
  def apply(filename: String, overwrite: Boolean = false, append: Boolean = false, replication: Short = 0): HdfsFileWriter = {
    if (!overwrite && !append) HdfsIO.ensureNewFile(filename)
    new HdfsFileWriter(filename, append, replication)
  }
} 
Example 14
Source File: AccountStorage.scala    From matcher   with MIT License
package com.wavesplatform.dex.db

import java.io.{File, FileInputStream, FileOutputStream}
import java.nio.file.Files
import java.util.Base64

import cats.syntax.either._
import com.google.common.primitives.{Bytes, Ints}
import com.wavesplatform.dex.crypto.Enigma
import com.wavesplatform.dex.db.AccountStorage.Settings.EncryptedFile
import com.wavesplatform.dex.domain.account.KeyPair
import com.wavesplatform.dex.domain.bytes.ByteStr
import com.wavesplatform.dex.domain.crypto
import net.ceedubs.ficus.readers.ValueReader

import scala.collection.mutable.ArrayBuffer

case class AccountStorage(keyPair: KeyPair)

object AccountStorage {

  sealed trait Settings

  object Settings {

    case class InMem(seed: ByteStr)                        extends Settings
    case class EncryptedFile(path: File, password: String) extends Settings

    implicit val valueReader: ValueReader[Settings] = ValueReader.relative[Settings] { config =>
      config.getString("type") match {
        case "in-mem" => InMem(Base64.getDecoder.decode(config.getString("in-mem.seed-in-base64")))
        case "encrypted-file" =>
          EncryptedFile(
            path = new File(config.getString("encrypted-file.path")),
            password = config.getString("encrypted-file.password")
          )
        case x => throw new IllegalArgumentException(s"The type of account storage '$x' is unknown. Please update your settings.")
      }
    }
  }

  def load(settings: Settings): Either[String, AccountStorage] = settings match {
    case Settings.InMem(seed) => Right(AccountStorage(KeyPair(seed)))
    case Settings.EncryptedFile(file, password) =>
      if (file.isFile) {
        val encryptedSeedBytes = readFile(file)
        val key                = Enigma.prepareDefaultKey(password)
        val decryptedBytes     = Enigma.decrypt(key, encryptedSeedBytes)
        AccountStorage(KeyPair(decryptedBytes)).asRight
      } else s"A file '${file.getAbsolutePath}' doesn't exist".asLeft
  }

  def save(seed: ByteStr, to: EncryptedFile): Unit = {
    Files.createDirectories(to.path.getParentFile.toPath)
    val key                = Enigma.prepareDefaultKey(to.password)
    val encryptedSeedBytes = Enigma.encrypt(key, seed.arr)
    writeFile(to.path, encryptedSeedBytes)
  }

  def getAccountSeed(baseSeed: ByteStr, nonce: Int): ByteStr = ByteStr(crypto.secureHash(Bytes.concat(Ints.toByteArray(nonce), baseSeed)))

  def readFile(file: File): Array[Byte] = {
    val reader = new FileInputStream(file)
    try {
      val buff = new Array[Byte](1024)
      val r    = new ArrayBuffer[Byte]
      while (reader.available() > 0) {
        val read = reader.read(buff)
        if (read > 0) {
          r.appendAll(buff.iterator.take(read))
        }
      }
      r.toArray
    } finally {
      reader.close()
    }
  }

  def writeFile(file: File, bytes: Array[Byte]): Unit = {
    val writer = new FileOutputStream(file, false)
    try writer.write(bytes)
    finally writer.close()
  }
} 
Example 15
Source File: XLSUtil.scala    From piflow   with BSD 2-Clause "Simplified" License
package cn.piflow.bundle.util
import java.io.{File, FileInputStream}

import org.apache.poi.hssf.usermodel.HSSFWorkbook
import org.apache.poi.ss.usermodel.Workbook
import org.apache.poi.xssf.usermodel.XSSFWorkbook

object XLSUtil {
  def processFile (file : File) : Workbook = {
    val split = file.getName.split("\\.") // '.' is a regex special character, so it must be escaped
    var wb : Workbook = null
    // decide by the file extension (xls/xlsx)
    if ("xls" == split(1)) {
      val fis = new FileInputStream(file) // file input stream
      wb = new HSSFWorkbook(fis)
    }
    else if ("xlsx" == split(1)) wb = new XSSFWorkbook(file)
    else {
      throw new Exception("Unsupported file type!")
    }
    wb
  }
} 
Example 16
Source File: ServerIpUtil.scala    From piflow   with BSD 2-Clause "Simplified" License
package cn.piflow.util

import java.io.{File, FileInputStream, InputStream}
import java.net.InetAddress
import java.util.Properties

object ServerIpUtil {
  private val prop: Properties = new Properties()
  var fis: InputStream = null
  var path :String = ""

    try{

    val userDir = System.getProperty("user.dir")
    path = userDir + "/server.ip"
    val file = new File(path)
    if(!file.exists()){
      file.createNewFile()
    }
    prop.load(new FileInputStream(path))
  } catch{
    case ex: Exception => ex.printStackTrace()
  }

  def getServerIpFile() : String = {
    path
  }


  def getServerIp(): String ={
    val obj = prop.get("server.ip")
    if(obj != null){
      return obj.toString
    }
    null
  }

  def main(args: Array[String]): Unit = {

    val ip = InetAddress.getLocalHost.getHostAddress
    //write ip to server.ip file
    FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    println(ServerIpUtil.getServerIp())
  }
} 
Example 17
Source File: PropertyUtil.scala    From piflow   with BSD 2-Clause "Simplified" License
package cn.piflow.util

import java.io.{FileInputStream, InputStream}
import java.util.Properties

object PropertyUtil {
  private val prop: Properties = new Properties()
  var fis: InputStream = null
  var path :String = ""
  var classPath:String = ""
  var scalaPath:String = ""
    try{
    //val path = Thread.currentThread().getContextClassLoader.getResource("config.properties").getPath
    //fis = this.getClass.getResourceAsStream("")
    val userDir = System.getProperty("user.dir")
    path = userDir + "/config.properties"
    classPath = userDir + "/classpath/"
    scalaPath = userDir + "/scala"
    prop.load(new FileInputStream(path))
  } catch{
    case ex: Exception => ex.printStackTrace()
  }

  def getConfigureFile() : String = {
    path
  }

  def getClassPath():String = {
    classPath
  }

  def getScalaPath():String = {
    scalaPath
  }

  def getPropertyValue(propertyKey: String): String ={
    val obj = prop.get(propertyKey)
    if(obj != null){
      return obj.toString
    }
    null
  }

  def getIntPropertyValue(propertyKey: String): Int ={
    val obj = prop.getProperty(propertyKey)
    if(obj != null){
      return obj.toInt
    }
    throw new NullPointerException
  }

} 
Example 18
Source File: ImageUtil.scala    From piflow   with BSD 2-Clause "Simplified" License
package cn.piflow.conf.util

import java.io.{BufferedInputStream, FileInputStream}

import com.sksamuel.scrimage.Image


object ImageUtil {

  def getImage(imagePath:String) : Array[Byte] = {
    try{
      val classLoader = this.getClass.getClassLoader
      val imageInputStream = classLoader.getResourceAsStream(imagePath)
      val input = new BufferedInputStream(imageInputStream)
      Image.fromStream(input).bytes
    }catch {
      case ex => println(ex); Array[Byte]()
    }
  }

  def saveImage(imageBytes :  Array[Byte], savePath : String) = {
    Image(imageBytes).output(savePath)
  }

} 
Example 19
Source File: PipeInfoRead.scala    From daf   with BSD 3-Clause "New" or "Revised" License
package it.gov.daf.ingestion.pipelines

import java.io.FileInputStream
import com.typesafe.config.ConfigFactory
import ingestion_manager.yaml.PipelineInfo
import play.api.libs.json._
import data.PipelineClass.pipelineInfoReads

object PipelineInfoRead {
  val pipeInfoFile = ConfigFactory.load().getString("ingmgr.pipeinfo.datapath")

  def pipelineInfo(): List[PipelineInfo] = {
    val stream = new FileInputStream(pipeInfoFile)
    val pipeInfoOpt: Option[List[PipelineInfo]] = try { Json.parse(stream).asOpt[List[PipelineInfo]] } finally { stream.close() }
    pipeInfoOpt match {
      case Some(s) => s
      case None => List()
    }
  }

  def pipelineInfoByCat(category: String): List[PipelineInfo] = {
    val pipelineList = pipelineInfo()
    pipelineList.filter(_.category.equals(category))
  }
  def pipelineInfoById(id: String): List[PipelineInfo] = {
    val pipelineList = pipelineInfo()
    pipelineList.filter(_.id.equals(id))
  }

} 
Example 20
Source File: TestJson.scala    From daf   with BSD 3-Clause "New" or "Revised" License
package it.gov.daf.catalogmanager.repository.voc

import java.io.FileInputStream

import catalog_manager.yaml.KeyValue
import play.api.libs.json.{JsArray, JsValue}

object TestJson extends App {
  import play.api.libs.json.{JsError, JsResult, JsSuccess, Json}

  val stream = new FileInputStream("data/voc/cv_theme-subtheme_bk.json")
  val json = try { (Json.parse(stream) \ "voc").asOpt[JsArray]} finally {stream.close()}
  val dcatapitThemeId = "AGRI"
  val subthemeId = "policy"
  print {
    json match {
      case Some(s) => s.value.map(x => ((x \ "theme_code").as[String],
        (x \ "subthemes").as[List[JsValue]])).map{ x=>

        x._2.map{ y=> //subthemes

          (x._1, (y \ "subthemes_ita").as[List[List[String]]])
        }
      }.flatten
        .filter(x=>x._1.equals(dcatapitThemeId))
        .map{x=>
          println(x)
          x._2.map{y=>
            println(y)
            println(y.length)
            //println(y(0))
            //println(y(1))
            KeyValue(y(0), y(1))
          }
        }.flatMap(x=>x)
      case None =>
        println("VocRepositoryFile - Error occurred with Json")
        Seq()
    }
  }

} 
Example 21
Source File: TokenOccurrenceSource.scala    From dbpedia-spotlight-model   with Apache License 2.0
package org.dbpedia.spotlight.db.io

import java.io.{InputStream, FileInputStream, File}
import io.Source
import org.dbpedia.spotlight.db.WikipediaToDBpediaClosure
import org.dbpedia.spotlight.db.model.{ResourceStore, TokenTypeStore}
import org.dbpedia.spotlight.log.SpotlightLog
import scala.Predef._
import scala.Array
import org.dbpedia.spotlight.exceptions.{DBpediaResourceNotFoundException, NotADBpediaResourceException}
import org.dbpedia.spotlight.model.{TokenType, DBpediaResource}
import util.TokenOccurrenceParser




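// Reads per-resource token occurrence counts from Pig output, resolving Wikipedia URLs to
// DBpedia resources and token strings to TokenTypes.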
object TokenOccurrenceSource {

  def fromPigInputStream(tokenInputStream: InputStream, tokenTypeStore: TokenTypeStore, wikipediaToDBpediaClosure: WikipediaToDBpediaClosure, resStore: ResourceStore): Iterator[Triple[DBpediaResource, Array[TokenType], Array[Int]]] = {

    var i = 0
    plainTokenOccurrenceSource(tokenInputStream, 0) map {
      case (wikiurl: String, tokens: Array[String], counts: Array[Int]) => {
        i += 1
        if (i % 10000 == 0)
          SpotlightLog.info(this.getClass, "Read context for %d resources...", i)
        try {
          Triple(
            resStore.getResourceByName(wikipediaToDBpediaClosure.wikipediaToDBpediaURI(wikiurl)),
            tokens.map{ token => tokenTypeStore.getTokenType(token) },
            counts
          )
        } catch {
          case e: DBpediaResourceNotFoundException => Triple(null, null, null)
          case e: NotADBpediaResourceException     => Triple(null, null, null)
        }
      }
    }

  }

  def fromPigFile(tokenFile: File, tokenStore: TokenTypeStore, wikipediaToDBpediaClosure: WikipediaToDBpediaClosure, resStore: ResourceStore, minimumCount: Int) = fromPigInputStream(new FileInputStream(tokenFile), tokenStore, wikipediaToDBpediaClosure, resStore)

  val tokensParser = TokenOccurrenceParser.createDefault

  def plainTokenOccurrenceSource(tokenInputStream: InputStream, minimumCount: Int): Iterator[Triple[String, Array[String], Array[Int]]] = {
    Source.fromInputStream(tokenInputStream) getLines() filter(!_.equals("")) map {
      line: String => {
        val Array(wikiurl, tokens) = line.trim().split('\t')
        val Pair(tokensA, countsA) = tokensParser.parse(tokens, minimumCount)
        Triple(wikiurl, tokensA, countsA)
      }
    }
  }
} 
Example 22
Source File: TokenSource.scala    From dbpedia-spotlight-model   with Apache License 2.0
package org.dbpedia.spotlight.db.io

import org.dbpedia.spotlight.io.OccurrenceSource
import org.dbpedia.spotlight.db.model.{StringTokenizer, SurfaceFormStore}
import collection.mutable.HashMap
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import java.io.{InputStream, FileInputStream, File}
import org.dbpedia.spotlight.log.SpotlightLog
import org.dbpedia.spotlight.model._




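// Builds a token-to-count map from Pig token-occurrence output (plus optional extra tokens,
// e.g. surface-form tokens) and assigns each distinct token a TokenType id.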
object TokenSource {

  private val ADDITIONAL_TOKEN_COUNT = 1

  def fromSFStore(sfStore: SurfaceFormStore, tokenizer: StringTokenizer): Seq[String] = {
    SpotlightLog.info(this.getClass, "Adding all surface form tokens to the TokenStore...")
    sfStore.iterateSurfaceForms.grouped(100000).toList.par.flatMap(_.map{
      sf: SurfaceForm =>
        //Tokenize all SFs first
        tokenizer.tokenize(sf.name)
    }).seq.flatten
  }

  def fromPigFile(tokenFile: File, additionalTokens: Option[Seq[String]] = None, minimumCount: Int) = fromPigInputStream(new FileInputStream(tokenFile), additionalTokens, minimumCount)
  def fromPigInputStream(tokenFile: InputStream, additionalTokens: Option[Seq[String]] = None, minimumCount: Int) = {

    val tokenMap = HashMap[String, Int]()

    var i = 0
    TokenOccurrenceSource.plainTokenOccurrenceSource(tokenFile, minimumCount) foreach {
      p: Triple[String, Array[String], Array[Int]] => {
        i += 1
        if (i % 10000 == 0)
          SpotlightLog.info(this.getClass, "Read context for %d resources...", i)

        (0 to p._2.size -1).foreach {
          i: Int => tokenMap.put(p._2(i), tokenMap.getOrElse(p._2(i), 0) + p._3(i))
        }
      }
    }

    additionalTokens match {
      case Some(tokens) => {
        SpotlightLog.info(this.getClass, "Read %d additional tokens...", tokens.size)
        tokens.foreach { token: String =>
          tokenMap.put(token, tokenMap.getOrElse(token, 0) + ADDITIONAL_TOKEN_COUNT)
        }
      }
      case None =>
    }

    var id = -1
    tokenMap.map{
      case(token, count) => {
        id += 1
        (new TokenType(id, token, count), count)
      }
    }.toMap.asJava
  }

} 
Example 23
Source File: WebSearchConfiguration.scala    From dbpedia-spotlight-model   with Apache License 2.0
package org.dbpedia.spotlight.util

import java.io.{File, FileInputStream}
import java.util.Properties

import org.dbpedia.spotlight.exceptions.ConfigurationException
import org.dbpedia.spotlight.log.SpotlightLog



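// Loads web-search configuration from a properties file and validates that required keys
// (e.g. the Yahoo appID) are present.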
class WebSearchConfiguration (val configFile: File) {

    def this(fileName: String) {
        this(new File(fileName))
    }

    private val properties : Properties = new Properties()

    SpotlightLog.info(this.getClass, "Loading configuration file %s", configFile)
    properties.load(new FileInputStream(configFile))
    validate

    //TODO copied from IndexingConfiguration
    def get(key : String, defaultValue : String) : String = {
        properties.getProperty(key, defaultValue)
    }
    //TODO copied from IndexingConfiguration
    def get(key : String) : String = {
        val value = get(key, null)
        if(value == null) {
            throw new ConfigurationException(key+" not specified in "+configFile)
        }
        value
    }

    //TODO validate yahoo data ...
    private def validate {

        get("org.dbpedia.spotlight.yahoo.appID") // will throw an exception if it cannot find

//        val dumpFile = new File(get("org.dbpedia.spotlight.data.wikipediaDump"))
//        if(!dumpFile.isFile) {
//            throw new ConfigurationException("specified Wikipedia dump not found: "+dumpFile)
//        }

    }
} 
Example 24
Source File: SparkSqlUtils.scala    From HadoopLearning   with MIT License
package com.c503.utils

import java.io.{BufferedInputStream, BufferedReader, FileInputStream, InputStreamReader}
import java.nio.file.Path

import com.google.common.io.Resources
import org.apache.log4j.{Level, Logger}
import org.apache.mesos.Protos.Resource
import org.apache.spark.sql.SparkSession

import scala.io.Source


object SparkSqlUtils {

  // (object header and this helper reconstructed as an assumption: the scraped listing
  // dropped them along with the file's comments) Resolves a classpath resource to a path.
  def getPathByName(name: String): String =
    Resources.getResource(name).getPath

  def readSqlByPath(sqlPath: String): String = {
    val buf = new StringBuilder
    val path = this.getPathByName(sqlPath)
    val file = Source.fromFile(path)
    for (line <- file.getLines) {
      buf ++= line + "\n"
    }
    file.close()
    buf.toString()
  }

}
Example 25
Source File: File.scala    From nescala   with GNU General Public License v2.0
package com.owlandrews.nescala.helpers

import com.owlandrews.nescala.Console

object File {
  import java.io.File
  import java.net.URL
  import java.io.{FileFilter, FileInputStream, FileOutputStream, ObjectInputStream, ObjectOutputStream}
  import javax.imageio.ImageIO

  import scala.util.Try
  import scala.xml.XML
  import scala.language.postfixOps

  import sys.process._

  import com.typesafe.config.ConfigFactory

  def Download(url: String, filename: String) = (for{
    url <- Try(new URL(url))
    conn <- Try(url.openConnection().connect())
    file <- Try(new File(filename))
  } yield Try(url  #> file !!)) map {x => new File(filename)}

  def Writer(filename: String)(op: java.io.PrintWriter => Unit) = {
    val p = new java.io.PrintWriter(new File(filename))
    try op(p)
    finally p.close()
  }

  def Write(filename: String, content: String) = {
    val res = new java.io.PrintWriter(new File(filename))
    res.write(content)
    res.close()
  }

  def Filter = new FileFilter {
    override def accept(pathname: File): Boolean = pathname.getName.toLowerCase.endsWith(".nes")
  }

  def Image(file:Try[File]) = file.map(ImageIO.read)

  def Image(filename:String) = Try(ImageIO.read(resource(filename)))

  def Xml(filename:String) = XML.load(resource("/database.xml"))

  def Config(filename:String) = {
    val file = new File(filename)
    file.exists() match {
      case true => ConfigFactory.parseFile(file)
      case false => ConfigFactory.empty()
    }
  }

  def SaveState(console:Console) = {
    val fos = new FileOutputStream(s"$ApplicationFolder/${console.cartridge.CRC}.save")
    val oos = new ObjectOutputStream(fos)

    oos.writeObject(console)
    oos.close()
  }

  def LoadState(crc:String):Try[Console] = Try {
    val fis = new FileInputStream(s"$ApplicationFolder/$crc.save")
    val ois = new ObjectInputStreamWithCustomClassLoader(fis)

    val console = ois.readObject.asInstanceOf[Console]
    ois.close()
    console
  }

  // Taken from: https://gist.github.com/ramn/5566596
  private class ObjectInputStreamWithCustomClassLoader(fileInputStream: FileInputStream) extends ObjectInputStream(fileInputStream) {
    override def resolveClass(desc: java.io.ObjectStreamClass): Class[_] = {
      try { Class.forName(desc.getName, false, getClass.getClassLoader) }
      catch { case ex: ClassNotFoundException => super.resolveClass(desc) }
    }
  }

  lazy val ApplicationFolder: File = {
    val settingDirectory = System.getProperty("user.home") + "/.nescala"
    val settings = new java.io.File(settingDirectory)
    if (!settings.exists()) settings.mkdir()
    settings
  }

  private def resource(filename:String) = getClass.getResourceAsStream(filename)
} 
Example 26
Source File: LogCollector.scala    From pulse   with Apache License 2.0
package io.phdata.pulse.logcollector

import java.io.FileInputStream
import java.util.Properties
import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.{ ActorMaterializer, Materializer }
import com.typesafe.scalalogging.LazyLogging
import io.phdata.pulse.common.SolrService
import io.phdata.pulse.solr.SolrProvider
import org.apache.kudu.client.KuduClient.KuduClientBuilder

import scala.concurrent.duration.Duration
import scala.concurrent.{ Await, Future }
import scala.util.{ Failure, Success }


// (object header reconstructed as an assumption: the scraped listing dropped it
// together with the file's doc comment)
object LogCollector extends LazyLogging {

  def main(args: Array[String]): Unit = {
    System.getProperty("java.security.auth.login.config") match {
      case null =>
        logger.info(
          "java.security.auth.login.config is not set, continuing without kerberos authentication")
      case _ =>
        KerberosContext.scheduleKerberosLogin(0, 9, TimeUnit.HOURS)
    }

    start(args)
  }

  private def start(args: Array[String]): Unit = {
    val cliParser = new LogCollectorCliParser(args)

    val solrService = SolrProvider.create(cliParser.zkHosts().split(",").toList)
    val solrStream  = new SolrCloudStream(solrService)

    val kuduClient =
      cliParser.kuduMasters.toOption.map(masters =>
        KerberosContext.runPrivileged(new KuduClientBuilder(masters).build()))

    val kuduService =
      kuduClient.map(client => KerberosContext.runPrivileged(new KuduService(client)))

    val routes = new LogCollectorRoutes(solrStream, kuduService)

    cliParser.mode() match {
      case "kafka" => {
        kafka(solrService, cliParser.kafkaProps(), cliParser.topic())
      }
      case _ => {
        http(cliParser.port(), routes)
      }
    }
  }

  // Starts Http Service
  def http(port: Int, routes: LogCollectorRoutes): Future[Unit] = {
    implicit val actorSystem: ActorSystem   = ActorSystem()
    implicit val ec                         = actorSystem.dispatchers.lookup("akka.actor.http-dispatcher")
    implicit val materializer: Materializer = ActorMaterializer.create(actorSystem)

    val httpServerFuture = Http().bindAndHandle(routes.routes, "0.0.0.0", port)(materializer) map {
      binding =>
        logger.info(s"Log Collector interface bound to: ${binding.localAddress}")
    }

    httpServerFuture.onComplete {
      case Success(v) => ()
      case Failure(ex) => {
        logger.error("HTTP server failed, exiting. ", ex)
        System.exit(1)
      }
    }

    Await.ready(
      httpServerFuture,
      Duration.Inf
    )
  }

  // Starts Kafka Consumer
  def kafka(solrService: SolrService, kafkaProps: String, topic: String): Unit = {

    val solrCloudStream = new SolrCloudStream(solrService)

    val kafkaConsumer      = new PulseKafkaConsumer(solrCloudStream)
    val kafkaConsumerProps = new Properties()

    kafkaConsumerProps.load(new FileInputStream(kafkaProps))

    kafkaConsumer.read(kafkaConsumerProps, topic)
  }
} 
Example 27
Source File: Cp.scala    From benchmarks   with Apache License 2.0
package com.rossabaker
package benchmarks

import org.openjdk.jmh.annotations._

@State(Scope.Thread)
@Fork(2)
@Measurement(iterations = 10)
@Warmup(iterations = 10)
@Threads(1)
class Cp extends BenchmarkUtils {
  @Benchmark
  def fs2Sync(): Unit = {
    import _root_.fs2._, Stream._
    import java.nio.file.Paths
    io.file.readAll[Task](Paths.get("testdata/lorem-ipsum.txt"), 4096)
      .to(io.file.writeAll[Task](Paths.get("out/lorem-ipsum.txt")))
      .run
      .unsafeRun
  }

  @Benchmark
  def fs2Async(): Unit = {
    import _root_.fs2._, Stream._
    import java.nio.file.Paths
    io.file.readAllAsync[Task](Paths.get("testdata/lorem-ipsum.txt"), 4096)
      .to(io.file.writeAllAsync[Task](Paths.get("out/lorem-ipsum.txt")))
      .run
      .unsafeRun
  }

  @Benchmark
  def scalazStreamIo(): Unit = {
    import _root_.scalaz.stream._, Process._
    constant(4096)
      .through(io.fileChunkR("testdata/lorem-ipsum.txt"))
      .to(io.fileChunkW("out/lorem-ipsum.txt"))
      .run
      .unsafePerformSync
  }

  @Benchmark
  def scalazStreamNio(): Unit = {
    import _root_.scalaz.stream._, Process._
    constant(4096)
      .through(nio.file.chunkR("testdata/lorem-ipsum.txt"))
      .to(nio.file.chunkW("out/lorem-ipsum.txt"))
      .run
      .unsafePerformSync
  }

  // NOTE: the final @Benchmark method was truncated in the scraped listing; its head is
  // reconstructed here as a sketch (method name and copyFile body are assumptions) so that
  // the surviving Await.result block reads in context: a monix Task copies the file in
  // chunks and signals completion or failure through the callback.
  @Benchmark
  def monixTask(): Unit = {
    import java.io.{File, FileInputStream, FileOutputStream}
    import scala.concurrent.Await
    import scala.concurrent.duration.Duration
    import scala.util.control.NonFatal
    import _root_.monix.eval.Task
    import _root_.monix.execution.Cancelable

    def copyFile(from: File, to: File, chunkSize: Int): Task[Unit] =
      Task.create { (_, callback) =>
        try {
          val in = new FileInputStream(from)
          val out = new FileOutputStream(to)
          try {
            val buffer = new Array[Byte](chunkSize)
            var read = in.read(buffer)
            while (read >= 0) {
              out.write(buffer, 0, read)
              read = in.read(buffer)
            }
            callback.onSuccess(())
          } finally {
            in.close()
            out.close()
          }
        } catch {
          case NonFatal(ex) =>
            callback.onError(ex)
        }
        Cancelable.empty
      }

    Await.result(
      copyFile(new File("testdata/lorem-ipsum.txt"), new File("out/lorem-ipsum.txt"), 4096)
        .runAsync(monixScheduler),
      Duration.Inf
    )
  }
}
Example 28
Source File: Main.scala    From seals   with Apache License 2.0
package com.example.streaming

import java.io.{ InputStream, OutputStream, FileInputStream, FileOutputStream }

import cats.implicits._
import cats.effect.{ IO, IOApp, Blocker, ExitCode }

import fs2.{ Stream, Chunk, Pure }

import dev.tauri.seals.scodec.StreamCodecs._

object Main extends IOApp {

  sealed trait Color
  final case object Brown extends Color
  final case object Grey extends Color

  sealed trait Animal
  final case class Elephant(name: String, tuskLength: Float) extends Animal
  final case class Quokka(name: String, color: Color = Brown) extends Animal
  final case class Quagga(name: String, speed: Double) extends Animal

  def transform(from: InputStream, to: OutputStream)(f: Animal => Stream[Pure, Animal]): IO[Unit] = {
    Blocker[IO].use { blocker =>
      val input = fs2.io.readInputStream(
        IO.pure(from),
        chunkSize = 1024,
        blocker = blocker
      )
      val sIn: Stream[IO, Animal] = input.through(streamDecoderFromReified[Animal].toPipeByte[IO]).flatMap(f)
      val sOut: Stream[IO, Unit] = streamEncoderFromReified[Animal].encode(sIn).flatMap { bv =>
        Stream.chunk(Chunk.bytes(bv.bytes.toArray))
      }.through(fs2.io.writeOutputStream(
        IO.pure(to),
        blocker = blocker,
        closeAfterUse = true
      ))
      sOut.compile.drain
    }
  }

  val transformer: Animal => Stream[Pure, Animal] = {
    case Elephant(n, tl) => Stream(Elephant(n, tl + 17))
    case Quokka(n, Brown) => Stream(Quokka(n, Grey))
    case q @ Quokka(_, _) => Stream(q)
    case Quagga(_, _) => Stream.empty
  }

  override def run(args: List[String]): IO[ExitCode] = {
    val (from, to) = args match {
      case List(from, to, _*) =>
        (from, to)
      case List(from) =>
        (from, "out.bin")
      case _ =>
        ("in.bin", "out.bin")
    }

    val task = transform(new FileInputStream(from), new FileOutputStream(to))(transformer)
    task.as(ExitCode.Success)
  }
} 
Example 29
Source File: TotalTweetsScheduler.scala    From redrock   with Apache License 2.0
package com.restapi

import java.io.{File, FileInputStream}

import akka.actor.{ActorRef, Actor, ActorSystem, Props}
import akka.io.IO
import org.slf4j.LoggerFactory
import play.api.libs.json.Json
import spray.can.Http
import akka.pattern.ask
import spray.http.DateTime
import scala.concurrent.duration._
import akka.util.Timeout
import scala.concurrent.ExecutionContext.Implicits.global
import org.apache.commons.codec.digest.DigestUtils
import scala.io.Source

case object GetTotalTweetsScheduler

object CurrentTotalTweets {
  @volatile
  var totalTweets: Long = 0
}

class ExecuterTotalTweetsES(delay: FiniteDuration, interval: FiniteDuration) extends Actor {
  context.system.scheduler.schedule(delay, interval) {
    getTotalTweetsES
  }

  val logger = LoggerFactory.getLogger(this.getClass)

  override def receive: Actor.Receive = {
    case GetTotalTweetsScheduler => {
      logger.info(s"Getting Total of Tweets. Begin: ${CurrentTotalTweets.totalTweets}")
    }
    case _ => // just ignore any messages
  }

  def getTotalTweetsES: Unit = {
    val elasticsearchRequests = new GetElasticsearchResponse(0, Array[String](), Array[String](),
      LoadConf.restConf.getString("searchParam.defaulStartDatetime"),
      LoadConf.restConf.getString("searchParam.defaultEndDatetime"),
      LoadConf.esConf.getString("decahoseIndexName"))
    val totalTweetsResponse = Json.parse(elasticsearchRequests.getTotalTweetsESResponse())
    logger.info(s"Getting Total of Tweets. Current: ${CurrentTotalTweets.totalTweets}")
    CurrentTotalTweets.totalTweets = (totalTweetsResponse \ "hits" \ "total").as[Long]
    logger.info(s"Total users updated. New: ${CurrentTotalTweets.totalTweets}")
  }
} 
Example 30
Source File: SearchManagementRepository.scala    From smui   with Apache License 2.0
package models

import java.io.FileInputStream
import java.time.LocalDateTime
import java.util.UUID
import java.util.Date

import anorm.SqlParser.get
import javax.inject.Inject
import anorm._
import models.FeatureToggleModel.FeatureToggleService
import models.SearchInput.ID
import play.api.db.DBApi

@javax.inject.Singleton
class SearchManagementRepository @Inject()(dbapi: DBApi, toggleService: FeatureToggleService)(implicit ec: DatabaseExecutionContext) {

  private val db = dbapi.database("default")

  // On startup, always sync predefined tags with the DB
  syncPredefinedTagsWithDB()

  private def syncPredefinedTagsWithDB(): Unit = {
    db.withTransaction { implicit connection =>
      if (toggleService.isRuleTaggingActive) {
        for (fileName <- toggleService.predefinedTagsFileName) {
          val tags = PredefinedTag.fromStream(new FileInputStream(fileName))
          PredefinedTag.updateInDB(tags)
        }
      }
    }
  }

  
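  // Creates a new search input for the given Solr index and associates the supplied tags with it.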
  def addNewSearchInput(solrIndexId: SolrIndexId, searchInputTerm: String, tags: Seq[InputTagId]): SearchInputId = db.withConnection { implicit connection =>
    val id = SearchInput.insert(solrIndexId, searchInputTerm).id
    if (tags.nonEmpty) {
      TagInputAssociation.updateTagsForSearchInput(id, tags)
    }
    id
  }

  def getDetailedSearchInput(searchInputId: SearchInputId): Option[SearchInputWithRules] = db.withConnection { implicit connection =>
    SearchInputWithRules.loadById(searchInputId)
  }

  def updateSearchInput(searchInput: SearchInputWithRules): Unit = db.withTransaction { implicit connection =>
    SearchInputWithRules.update(searchInput)
  }

  def deleteSearchInput(searchInputId: String): Int = db.withTransaction { implicit connection =>
    SearchInputWithRules.delete(SearchInputId(searchInputId))
  }

  def listAllSuggestedSolrFields(solrIndexId: String): List[SuggestedSolrField] = db.withConnection { implicit connection =>
    SuggestedSolrField.listAll(SolrIndexId(solrIndexId))
  }

  def addNewSuggestedSolrField(solrIndexId: SolrIndexId, suggestedSolrFieldName: String): SuggestedSolrField = db.withConnection { implicit connection =>
    SuggestedSolrField.insert(solrIndexId, suggestedSolrFieldName)
  }

  def addNewDeploymentLogOk(solrIndexId: String, targetPlatform: String): Boolean = db.withConnection { implicit connection =>
    SQL("insert into deployment_log(id, solr_index_id, target_platform, last_update, result) values ({id}, {solr_index_id}, {target_platform}, {last_update}, {result})")
      .on(
        'id -> UUID.randomUUID().toString,
        'solr_index_id -> solrIndexId,
        'target_platform -> targetPlatform,
        'last_update -> new Date(),
        'result -> 0
      )
      .execute()
  }

  case class DeploymentLogDetail(id: String, lastUpdate: LocalDateTime, result: Int)

  val sqlParserDeploymentLogDetail: RowParser[DeploymentLogDetail] = {
    get[String](s"deployment_log.id") ~
      get[LocalDateTime](s"deployment_log.last_update") ~
      get[Int](s"deployment_log.result") map { case id ~ lastUpdate ~ result =>
      DeploymentLogDetail(id, lastUpdate, result)
    }
  }

  def lastDeploymentLogDetail(solrIndexId: String, targetPlatform: String): Option[DeploymentLogDetail] = db.withConnection {
    implicit connection => {
      SQL"select * from deployment_log where solr_index_id = $solrIndexId and target_platform = $targetPlatform order by last_update desc".as(sqlParserDeploymentLogDetail.*).headOption
    }
  }

} 
Example 31
Source File: NeuralNetwork.scala    From Scala-Machine-Learning-Projects   with MIT License 5 votes vote down vote up
package Yelp.Trainer

import org.deeplearning4j.nn.conf.MultiLayerConfiguration
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork
import org.nd4j.linalg.factory.Nd4j
import java.io.File
import org.apache.commons.io.FileUtils
import java.io.{DataInputStream, DataOutputStream, FileInputStream}
import java.nio.file.{Files, Paths}

object NeuralNetwork {  
  def loadNN(NNconfig: String, NNparams: String) = {
    // get neural network config
    val confFromJson: MultiLayerConfiguration = MultiLayerConfiguration.fromJson(FileUtils.readFileToString(new File(NNconfig)))    
     // get neural network parameters 
    val dis: DataInputStream = new DataInputStream(new FileInputStream(NNparams))
    val newParams = Nd4j.read(dis)    
     // creating network object
    val savedNetwork: MultiLayerNetwork = new MultiLayerNetwork(confFromJson)
    savedNetwork.init()
    savedNetwork.setParameters(newParams)    
    savedNetwork
  }
  
  def saveNN(model: MultiLayerNetwork, NNconfig: String, NNparams: String) = {
    // save neural network config
    FileUtils.write(new File(NNconfig), model.getLayerWiseConfigurations().toJson())     
    // save neural network parms
    val dos: DataOutputStream = new DataOutputStream(Files.newOutputStream(Paths.get(NNparams)))
    Nd4j.write(model.params(), dos)
  }  
} 
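The following is a minimal usage sketch for the helper above; the file names are placeholders and not part of the original example.

// Hypothetical round trip: restore a previously saved network, then write it back out.
val network: MultiLayerNetwork = NeuralNetwork.loadNN("nn-conf.json", "nn-params.bin")
NeuralNetwork.saveNN(network, "nn-conf-copy.json", "nn-params-copy.bin")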
Example 32
Source File: BigQueryFixture.scala    From scalikejdbc-bigquery   with Apache License 2.0 5 votes vote down vote up
package scalikejdbc.bigquery

import java.io.FileInputStream

import com.google.auth.oauth2.GoogleCredentials
import com.google.cloud.bigquery.{BigQueryOptions, BigQuery}

trait BigQueryFixture {

  def projectId(): String = sys.env("GCLOUD_PROJECT")

  def mkBigQuery(): BigQuery = {
    val jsonKeyFileLocation = sys.env("GCLOUD_SERVICE_KEY_LOCATION")
    val credentials = GoogleCredentials.fromStream(new FileInputStream(jsonKeyFileLocation))

    BigQueryOptions.newBuilder()
      .setCredentials(credentials)
      .setProjectId(projectId())
      .build()
      .getService
  }
} 
Example 33
Source File: MetricsRepoService.scala    From prometheus-opentsdb-exporter   with Apache License 2.0 5 votes vote down vote up
package services

import scala.concurrent.duration._

import java.io.{File, FileInputStream}
import javax.inject._

import akka.actor.{ActorNotFound, ActorSystem}
import akka.util.Timeout

import play.api.libs.json._
import play.api.{Configuration, Logger}

import models.Metric
import actors.MetricsRepoActor
import actors.MetricsRepoActor.{RegisterMetrics, ResetMetrics}


@Singleton
class MetricsRepoService @Inject()(
  configuration: Configuration,
  system: ActorSystem
) {
  private implicit val to: Timeout = 5 seconds

  private val metricsDir = configuration.getString("metrics.dir").get

  private implicit val ec = system.dispatcher

  private def getListOfFiles(dir: String):List[File] = {
    val d = new File(dir)
    if (d.exists && d.isDirectory) {
      d.listFiles.filter(_.isFile).toList.sortBy(_.getAbsolutePath)
    } else {
      Logger.warn(s"Metrics dir not found: $dir")
      Logger.info(s"Working dir: ${new File(".").getAbsolutePath}")
      List[File]()
    }
  }

  lazy val metricsRepo = {
    Logger.info(s"Initializing the metrics repo.")
    system.actorSelection(s"${MetricsRepoActor.name}")
      .resolveOne()
      .recover {
        case ActorNotFound(_) =>
          system.actorOf(MetricsRepoActor.props(), MetricsRepoActor.name)
      }
  }

  def reloadMetrics(): Unit = {
    metricsRepo.foreach { mr =>
      Logger.info("Loading metrics definitions.")

      mr ! ResetMetrics

      getListOfFiles(metricsDir).foreach { f =>
        Logger.info(s"Loading metrics definitions from: ${f.getAbsolutePath}")

        Json.parse(new FileInputStream(f)).validate[Seq[Metric]].fold(
          valid = metrics => {
            Logger.info("Metrics definitions parsed and validating. Reloading...")
            mr ! RegisterMetrics(metrics)
          },
          invalid = errors =>
            Logger.error(errors.mkString("\n"))
        )
      }
    }
  }

  reloadMetrics()
} 
Example 34
Source File: Https.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.common

import java.io.{FileInputStream, InputStream}
import java.security.{KeyStore, SecureRandom}
import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}

import akka.http.scaladsl.ConnectionContext
import akka.stream.TLSClientAuth
import com.typesafe.sslconfig.akka.AkkaSSLConfig

object Https {
  case class HttpsConfig(keystorePassword: String, keystoreFlavor: String, keystorePath: String, clientAuth: String)

  def getCertStore(password: Array[Char], flavor: String, path: String): KeyStore = {
    val cs: KeyStore = KeyStore.getInstance(flavor)
    val certStore: InputStream = new FileInputStream(path)
    cs.load(certStore, password)
    cs
  }

  def connectionContext(httpsConfig: HttpsConfig, sslConfig: Option[AkkaSSLConfig] = None) = {

    val keyFactoryType = "SunX509"
    val clientAuth = {
      if (httpsConfig.clientAuth.toBoolean)
        Some(TLSClientAuth.need)
      else
        Some(TLSClientAuth.none)
    }

    val keystorePassword = httpsConfig.keystorePassword.toCharArray

    val keyStore: KeyStore = KeyStore.getInstance(httpsConfig.keystoreFlavor)
    val keyStoreStream: InputStream = new FileInputStream(httpsConfig.keystorePath)
    keyStore.load(keyStoreStream, keystorePassword)

    val keyManagerFactory: KeyManagerFactory = KeyManagerFactory.getInstance(keyFactoryType)
    keyManagerFactory.init(keyStore, keystorePassword)

    // Currently, we are using the keystore as truststore as well, because the clients use the same keys as the
    // server for client authentication (if enabled).
    // So this code is guided by https://doc.akka.io/docs/akka-http/10.0.9/scala/http/server-side-https-support.html
    // This needs to be reworked, when we fix the keys and certificates.
    val trustManagerFactory: TrustManagerFactory = TrustManagerFactory.getInstance(keyFactoryType)
    trustManagerFactory.init(keyStore)

    val sslContext: SSLContext = SSLContext.getInstance("TLS")
    sslContext.init(keyManagerFactory.getKeyManagers, trustManagerFactory.getTrustManagers, new SecureRandom)

    ConnectionContext.https(sslContext, sslConfig, clientAuth = clientAuth)
  }
} 
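A rough usage sketch for the object above; the keystore path, flavor and password are placeholder values, not taken from the original project.

// Placeholder configuration; real deployments read these values from config files.
val httpsConfig = Https.HttpsConfig(
  keystorePassword = "changeit",
  keystoreFlavor = "PKCS12",
  keystorePath = "/path/to/keystore.p12",
  clientAuth = "false")
val serverContext = Https.connectionContext(httpsConfig)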
Example 35
Source File: FileUtil.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.bundle.serializer

import java.io.{File, FileInputStream, FileOutputStream}
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}

import resource._


case class FileUtil() {
  def rmRF(path: File): Array[(String, Boolean)] = {
    Option(path.listFiles).map(_.flatMap(f => rmRF(f))).getOrElse(Array()) :+ (path.getPath -> path.delete)
  }

  def extract(source: File, dest: File): Unit = {
    dest.mkdirs()
    for(in <- managed(new ZipInputStream(new FileInputStream(source)))) {
      extract(in, dest)
    }
  }

  def extract(in: ZipInputStream, dest: File): Unit = {
    dest.mkdirs()
    val buffer = new Array[Byte](1024 * 1024)

    var entry = in.getNextEntry
    while(entry != null) {
      if(entry.isDirectory) {
        new File(dest, entry.getName).mkdirs()
      } else {
        val filePath = new File(dest, entry.getName)
        for(out <- managed(new FileOutputStream(filePath))) {
          var len = in.read(buffer)
          while(len > 0) {
            out.write(buffer, 0, len)
            len = in.read(buffer)
          }
        }
      }
      entry = in.getNextEntry
    }
  }

  def zip(source: File, dest: File): Unit = {
    for(out <- managed(new ZipOutputStream(new FileOutputStream(dest)))) {
      zip(source, out)
    }
  }

  def zip(source: File, dest: ZipOutputStream): Unit = zip(source, source, dest)

  def zip(base: File, source: File, dest: ZipOutputStream): Unit = {
    val buffer = new Array[Byte](1024 * 1024)

    for(files <- Option(source.listFiles);
        file <- files) {
      val name = file.toString.substring(base.toString.length + 1)

      if(file.isDirectory) {
        dest.putNextEntry(new ZipEntry(s"$name/"))
        zip(base, file, dest)
      } else {
        dest.putNextEntry(new ZipEntry(name))

        for (in <- managed(new FileInputStream(file))) {
          var read = in.read(buffer)
          while (read > 0) {
            dest.write(buffer, 0, read)
            read = in.read(buffer)
          }
        }
      }
    }
  }
} 
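A short usage sketch, assuming the directory and archive paths below (they are placeholders):

// Zip a directory into an archive, then unpack it into another directory.
val util = FileUtil()
util.zip(new File("/tmp/bundle-dir"), new File("/tmp/bundle.zip"))
util.extract(new File("/tmp/bundle.zip"), new File("/tmp/bundle-out"))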
Example 36
Source File: Zip.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky.history

import java.io.{File, FileInputStream, FileOutputStream}
import java.util.zip.{ZipEntry, ZipOutputStream}

object Zip {


  def compressFolder(zipFilePath: File, folder: File): Unit = {
    import java.io.File
    def recursiveListFiles(f: File): Array[File] = {
      val these = f.listFiles
      these.filter(_.isFile) ++ these.filter(_.isDirectory).flatMap(recursiveListFiles)
    }

    val toList = recursiveListFiles(folder).toList
    compress(zipFilePath, folder, toList)
  }

  def compress(zipFilePath: File, root: File, files: List[File]): Unit = {
    val zip = new ZipOutputStream(new FileOutputStream(zipFilePath))
    val rootPath = root.getAbsolutePath
    try {
      for (file <- files) {
        zip.putNextEntry(new ZipEntry(file.getAbsolutePath.substring(rootPath.length)))
        val in = new FileInputStream(file)
        try {
          Iterator
            .continually(in.read())
            .takeWhile(_ > -1)
            .foreach(zip.write)
        } finally {
          in.close()
        }
        zip.closeEntry()
      }
    }
    finally {
      zip.close()
    }
  }
} 
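A one-line usage sketch; the report paths are placeholders:

// Compress a whole report directory into a single archive.
Zip.compressFolder(new File("target/flaky-report.zip"), new File("target/flaky-report"))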
Example 37
Source File: Unzip.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky

import java.io.File

trait Unzip {

  def unzip(zipped: File, unzipDir: File, deleteOnExit: Boolean = true): Unit = {
    import java.io.{FileInputStream, FileOutputStream}
    import java.util.zip.ZipInputStream
    val fis = new FileInputStream(zipped)
    val zis = new ZipInputStream(fis)

    unzipDir.mkdirs()
    Stream
      .continually(zis.getNextEntry)
      .takeWhile(_ != null)
      .foreach { file =>
        if (file.isDirectory) {
          val dir = new File(unzipDir, file.getName)
          dir.mkdirs()
          if (deleteOnExit){
            dir.deleteOnExit()
          }
        } else {
          val file1 = new File(unzipDir, file.getName)
          if (deleteOnExit){
            file1.deleteOnExit()
          }
          val fout = new FileOutputStream(file1)
          val buffer = new Array[Byte](1024)
          Stream.continually(zis.read(buffer)).takeWhile(_ != -1).foreach(fout.write(buffer, 0, _))
          fout.close() // close each extracted file once it has been fully written
        }
      }
    zis.close() // release the zip stream (and the underlying file stream) when done
  }
} 
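Since Unzip is a plain trait it can be mixed into any object; a minimal sketch with placeholder paths:

// Hypothetical helper object mixing in the trait above.
object ArchiveTools extends Unzip

ArchiveTools.unzip(new File("/tmp/report.zip"), new File("/tmp/report"))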
Example 38
Source File: PigTransformation.scala    From schedoscope   with Apache License 2.0 5 votes vote down vote up
package org.schedoscope.dsl.transformations

import java.io.{FileInputStream, InputStream}

import org.apache.commons.lang.StringUtils
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege
import org.apache.hadoop.hive.ql.udf.UDFLength
import org.apache.hive.hcatalog.data.schema.HCatSchema
import org.apache.hive.hcatalog.pig.HCatLoader
import org.apache.pig.builtin.ParquetStorer
import org.schedoscope.scheduler.service.ViewTransformationStatus



case class PigTransformation(latin: String, dirsToDelete: List[String] = List()) extends Transformation {

  def name = "pig"

  override def stringsToChecksum = List(latin)

  description = "[..]" + StringUtils.abbreviate(latin.replaceAll("\n", "").replaceAll("\t", "").replaceAll("\\s+", " "), 60)

  def defaultLibraries = {
    // FIXME: declare jars instead of any random class included in this jar
    val classes = List(
      // needed for usage of HCatalog table management
      classOf[HCatLoader], classOf[HCatSchema], classOf[HiveObjectPrivilege], classOf[UDFLength],
      // needed for usage of storage format Parquet with pig
      classOf[ParquetStorer])
    classes.map(cl => try {
      cl.getProtectionDomain().getCodeSource().getLocation().getFile
    } catch {
      case t: Throwable => null
    })
      .filter(cl => cl != null && !"".equals(cl.trim))
  }

  override def viewTransformationStatus = ViewTransformationStatus(
    name,
    Some(Map("latin" -> latin)))
}

object PigTransformation {

  def scriptFrom(inputStream: InputStream): String = scala.io.Source.fromInputStream(inputStream, "UTF-8").mkString

  def scriptFromResource(resourcePath: String): String = scriptFrom(getClass().getClassLoader().getResourceAsStream(resourcePath))

  def scriptFrom(filePath: String): String = scriptFrom(new FileInputStream(filePath))
} 
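A brief usage sketch for the companion helpers; the script path and staging directory are placeholders:

// Load the Pig Latin script from the local file system and wrap it in a transformation.
val latin = PigTransformation.scriptFrom("/opt/scripts/aggregate.pig")
val transformation = PigTransformation(latin, dirsToDelete = List("/tmp/staging"))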
Example 39
Source File: OozieTransformation.scala    From schedoscope   with Apache License 2.0 5 votes vote down vote up
package org.schedoscope.dsl.transformations

import java.io.{FileInputStream, InputStream}
import java.util.Properties

import org.apache.commons.lang.StringUtils
import org.schedoscope.Settings
import org.schedoscope.scheduler.service.ViewTransformationStatus

import scala.collection.JavaConversions._


case class OozieTransformation(bundle: String, workflow: String, var workflowAppPath: String) extends Transformation {
  def name = "oozie"

  override def fileResourcesToChecksum = List(workflowAppPath)

  description = StringUtils.abbreviate(s"${bundle}/${workflow}", 100)

  override def viewTransformationStatus = ViewTransformationStatus(
    name,
    Some(Map(
      "bundle" -> bundle,
      "workflow" -> workflow)))
}

object OozieTransformation {
  def oozieWFPath(bundle: String, workflow: String) = s"${Settings().getDriverSettings("oozie").location}/workflows/${bundle}/${workflow}/"

  def configurationFrom(inputStream: InputStream): Map[String, String] = {
    val props = new Properties()

    try {
      props.load(inputStream)
    } catch {
      case t: Throwable =>
    }

    Map() ++ props
  }

  def configurationFrom(filePath: String): Map[String, String] = try
    configurationFrom(new FileInputStream(filePath))
  catch {
    case t: Throwable => Map()
  }

  def configurationFromResource(resourcePath: String): Map[String, String] =
    try
      configurationFrom(getClass().getClassLoader().getResourceAsStream(resourcePath))
    catch {
      case t: Throwable => Map()
    }
} 
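A small usage sketch for the configuration helpers; the properties path is a placeholder:

// Returns an empty map if the file cannot be read, as implemented above.
val oozieProps: Map[String, String] = OozieTransformation.configurationFrom("/opt/oozie/job.properties")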
Example 40
Source File: ShapeReaderSuite.scala    From magellan   with Apache License 2.0 5 votes vote down vote up
package magellan.io

import java.io.{DataInputStream, File, FileInputStream}

import org.apache.commons.io.EndianUtils
import org.scalatest.FunSuite

class ShapeReaderSuite extends FunSuite {

  test("Read Polygon") {
    val path = this.getClass.getClassLoader.getResource("testpolygon/testpolygon.shp").getPath
    val dis = new DataInputStream(new FileInputStream(new File(path)))
    val header = new Array[Byte](100)
    dis.readFully(header, 0, 100) // discard the first 100 bytes

    // discard the record header and content length
    assert(dis.readInt() === 1)
    val contentLength = 2 * dis.readInt() //content length in bytes

    // contentlength = shapetype(int) + bounding box (4 doubles) + numParts (int) + numPoints (int) +
    // parts (int) + points (16 * length) bytes

    val expectedLength = (contentLength - 4 - 4 * 8 - 4 - 4 - 4) / 16

    println(expectedLength)

    // discard the geometry type
    assert(EndianUtils.swapInteger(dis.readInt()) === 5)

    // now read the polygon
    val polygonReader = new PolygonReader()
    val polygon = polygonReader.readFields(dis)
    assert(polygon.length() === expectedLength)
  }
} 
Example 41
Source File: HDFSFileService.scala    From retail_analytics   with Apache License 2.0 5 votes vote down vote up
package models

import scalaz._
import Scalaz._
import scalaz.EitherT._
import scalaz.Validation
import scalaz.NonEmptyList._
import java.io.BufferedInputStream
import java.io.File
import java.io.FileInputStream
import java.io.InputStream
import org.apache.hadoop.conf._
import org.apache.hadoop.fs._

object HDFSFileService {
  private val conf = new Configuration()
  private val hdfsCoreSitePath = new Path("core-site.xml")
  private val hdfsHDFSSitePath = new Path("hdfs-site.xml")

  conf.addResource(hdfsCoreSitePath)
  conf.addResource(hdfsHDFSSitePath)

  private val fileSystem = FileSystem.get(conf)

  def saveFile(filepath: String): ValidationNel[Throwable, String] = {
    (Validation.fromTryCatch[String] {
      
      val file = new File(filepath)
      val out = fileSystem.create(new Path(file.getName))
      
      
      val in = new BufferedInputStream(new FileInputStream(file))
      var b = new Array[Byte](1024)
      var numBytes = in.read(b)
      while (numBytes > 0) {
        out.write(b, 0, numBytes)
        numBytes = in.read(b)
      }
      
      in.close()
      out.close()
      "File Uploaded"
    } leftMap { t: Throwable => nels(t) })

  }
 

  def removeFile(filename: String): Boolean = {
    val path = new Path(filename)
    fileSystem.delete(path, true)
  }

  def getFile(filename: String): InputStream = {
    val path = new Path(filename)
    fileSystem.open(path)
  }

  def createFolder(folderPath: String): Unit = {
    val path = new Path(folderPath)
    if (!fileSystem.exists(path)) {
      fileSystem.mkdirs(path)
    }
  }
} 
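A usage sketch, assuming core-site.xml and hdfs-site.xml are resolvable as the object expects; the paths are placeholders:

// Create a target folder, then upload a local file; the result is a scalaz Validation.
HDFSFileService.createFolder("/data/incoming")
val result = HDFSFileService.saveFile("/local/path/report.csv")
println(result) // prints either the "File Uploaded" success message or the collected errors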
Example 42
Source File: AkkaBuild.scala    From perf_tester   with Apache License 2.0 5 votes vote down vote up
package akka

import java.io.{ FileInputStream, InputStreamReader }
import java.util.Properties

import sbt.Keys._
import sbt._
import scala.collection.breakOut

object AkkaBuild {

  val enableMiMa = true

  lazy val buildSettings = Dependencies.Versions ++ Seq(
    organization := "com.typesafe.akka",
    version := "2.5-SNAPSHOT")

  private def allWarnings: Boolean = System.getProperty("akka.allwarnings", "false").toBoolean

  lazy val defaultSettings = 
    Seq[Setting[_]](
      // compile options
      scalacOptions in Compile ++= Seq("-encoding", "UTF-8", "-target:jvm-1.8", "-feature", "-unchecked", "-Xlog-reflective-calls", "-Xlint"),
      scalacOptions in Compile ++= (if (allWarnings) Seq("-deprecation") else Nil),
      // -XDignore.symbol.file suppresses sun.misc.Unsafe warnings
      javacOptions in compile ++= Seq("-encoding", "UTF-8", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked", "-XDignore.symbol.file"),
      javacOptions in compile ++= (if (allWarnings) Seq("-Xlint:deprecation") else Nil),
      javacOptions in doc ++= Seq(),

      crossVersion := CrossVersion.binary,

      ivyLoggingLevel in ThisBuild := UpdateLogging.Quiet,

      licenses := Seq(("Apache License, Version 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))),
      homepage := Some(url("http://akka.io/")),

      apiURL := Some(url(s"http://doc.akka.io/api/akka/${version.value}"))
  )

  def loadSystemProperties(fileName: String): Unit = {
    import scala.collection.JavaConverters._
    val file = new File(fileName)
    if (file.exists()) {
      println("Loading system properties from file `" + fileName + "`")
      val in = new InputStreamReader(new FileInputStream(file), "UTF-8")
      val props = new Properties
      props.load(in)
      in.close()
      sys.props ++= props.asScala // merge the loaded entries into the JVM system properties
    }
  }

  def majorMinor(version: String): Option[String] = """\d+\.\d+""".r.findFirstIn(version)
} 
Example 43
Source File: GCSRepository.scala    From sbt-google-cloud-storage   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.gcsplugin

import java.util
import java.io._
import java.io.FileInputStream

import scala.collection.JavaConverters._

import com.google.cloud.storage._
import com.google.cloud.storage.Storage
import com.google.cloud.storage.Bucket._
import com.lightbend.gcsplugin.AccessRights._
import org.apache.ivy.core.module.descriptor._
import org.apache.ivy.plugins.repository._

case class GCSRepository(bucketName: String, publishPolicy: AccessRigths) extends AbstractRepository {
  private val storage: Storage = StorageOptions.getDefaultInstance.getService
  private lazy val bucket = storage.get(bucketName)

  override def getResource(source: String): GCSResource = {
    GCSResource.create(storage, bucketName, source)
  }

  override def get(source: String, destination: File): Unit = {

    val extSource = if (destination.toString.endsWith("sha1"))
      source + ".sha1"
    else if (destination.toString.endsWith("md5"))
      source + ".md5"
    else
      source

    GCSResource.toFile(storage, GCSResource.create(storage, bucketName, extSource), destination)
  }

  override def list(parent: String): util.List[String] = {
    storage.list(bucketName).getValues.asScala.map(_.getName).toList.asJava
  }

  override def put(artifact: Artifact, source: File, destination: String, overwrite: Boolean): Unit = {

    publishPolicy match {
      case AccessRights.PublicRead ⇒
        bucket.create(
          destination.replace("//", "/"),
          new FileInputStream(source),
          getContentType(artifact.getType),
          BlobWriteOption.predefinedAcl(Storage.PredefinedAcl.PUBLIC_READ)
        )
      case AccessRights.InheritBucket ⇒
        bucket.create(
          destination.replace("//", "/"),
          new FileInputStream(source),
          getContentType(artifact.getType)
        )
    }
  }

  private def getContentType(ext: String): String = {

    ext.toLowerCase match {
      case "jar"  ⇒ "application/java-archive"
      case "xml"  ⇒ "application/xml"
      case "sha1" ⇒ "text/plain"
      case "md5"  ⇒ "text/plain"
      case "ivy"  ⇒ "application/xml"
      case _      ⇒ "application/octet-stream"
    }
  }
} 
Example 44
Source File: SSLSupport.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparkta.serving.api.ssl

import java.io.FileInputStream
import java.security.{KeyStore, SecureRandom}
import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}

import com.stratio.sparta.serving.api.helpers.SpartaHelper.log
import com.stratio.sparta.serving.core.config.SpartaConfig
import spray.io._

import scala.util.{Failure, Success, Try}

trait SSLSupport {

  implicit def sslContext: SSLContext = {
    val context = SSLContext.getInstance("TLS")
    if(isHttpsEnabled) {
      val keyStoreResource = SpartaConfig.apiConfig.get.getString("certificate-file")
      val password = SpartaConfig.apiConfig.get.getString("certificate-password")

      val keyStore = KeyStore.getInstance("jks")
      keyStore.load(new FileInputStream(keyStoreResource), password.toCharArray)
      val keyManagerFactory = KeyManagerFactory.getInstance("SunX509")
      keyManagerFactory.init(keyStore, password.toCharArray)
      val trustManagerFactory = TrustManagerFactory.getInstance("SunX509")
      trustManagerFactory.init(keyStore)
      context.init(keyManagerFactory.getKeyManagers, trustManagerFactory.getTrustManagers, new SecureRandom)
    }
    context
  }

  implicit def sslEngineProvider: ServerSSLEngineProvider = {
    ServerSSLEngineProvider { engine =>
      engine.setEnabledCipherSuites(Array(
        "TLS_RSA_WITH_AES_128_CBC_SHA", "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384",
        "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", "TLS_RSA_WITH_AES_256_CBC_SHA256",
        "TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384", "TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384",
        "TLS_DHE_RSA_WITH_AES_256_CBC_SHA256", "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
        "TLS_DHE_RSA_WITH_AES_256_CBC_SHA", "TLS_RSA_WITH_AES_256_CBC_SHA",
        "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA"))
      engine.setEnabledProtocols(Array( "TLSv1.2" ))
      engine
    }
  }

  def isHttpsEnabled: Boolean =
    SpartaConfig.getSprayConfig match {
      case Some(config) =>
        Try(config.getValue("ssl-encryption")) match {
          case Success(value) =>
            "on".equals(value.unwrapped())
          case Failure(e) =>
            log.error("Incorrect value in ssl-encryption option, setting https disabled", e)
            false
        }
      case None =>
        log.warn("Impossible to get spray config, setting https disabled")
        false
    }
} 
Example 45
Source File: Command.scala    From scala-ssh   with Apache License 2.0 5 votes vote down vote up
package com.decodified.scalassh

import net.schmizz.sshj.connection.channel.direct.Session
import java.io.{ FileInputStream, File, ByteArrayInputStream, InputStream }

case class Command(command: String, input: CommandInput = CommandInput.NoInput, timeout: Option[Int] = None)

object Command {
  implicit def string2Command(cmd: String) = Command(cmd)
}

case class CommandInput(inputStream: Option[InputStream])

object CommandInput {
  lazy val NoInput = CommandInput(None)
  implicit def apply(input: String, charsetName: String = "UTF8"): CommandInput = apply(input.getBytes(charsetName))
  implicit def apply(input: Array[Byte]): CommandInput = apply(Some(new ByteArrayInputStream(input)))
  implicit def apply(input: InputStream): CommandInput = apply(Some(input))
  def fromFile(file: String): CommandInput = fromFile(new File(file))
  def fromFile(file: File): CommandInput = new FileInputStream(file)
  def fromResource(resource: String): CommandInput = getClass.getClassLoader.getResourceAsStream(resource)
}

class CommandResult(val channel: Session.Command) {
  def stdErrStream: InputStream = channel.getErrorStream
  def stdOutStream: InputStream = channel.getInputStream
  lazy val stdErrBytes = new StreamCopier().emptyToByteArray(stdErrStream)
  lazy val stdOutBytes = new StreamCopier().emptyToByteArray(stdOutStream)
  def stdErrAsString(charsetname: String = "utf8") = new String(stdErrBytes, charsetname)
  def stdOutAsString(charsetname: String = "utf8") = new String(stdOutBytes, charsetname)
  lazy val exitSignal: Option[String] = Option(channel.getExitSignal).map(_.toString)
  lazy val exitCode: Option[Int] = Option(channel.getExitStatus)
  lazy val exitErrorMessage: Option[String] = Option(channel.getExitErrorMessage)
} 
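A usage sketch for the implicit conversions above; the command strings and input file are placeholders:

// A command with no input, via the implicit string2Command conversion.
val simple: Command = "uname -a"

// A command fed from a local file, with an explicit timeout value.
val withInput = Command("wc -l", CommandInput.fromFile("/tmp/data.txt"), timeout = Some(5000))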
Example 46
Source File: Util.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.util

import java.io.{BufferedReader, File, FileInputStream, InputStreamReader}
import java.net.{ServerSocket, URI}
import scala.concurrent.forkjoin.ThreadLocalRandom
import scala.sys.process.Process
import scala.util.{Failure, Success, Try}

import com.typesafe.config.{Config, ConfigFactory}

import org.apache.gearpump.cluster.AppJar
import org.apache.gearpump.jarstore.JarStoreClient
import org.apache.gearpump.transport.HostPort

object Util {
  val LOG = LogUtil.getLogger(getClass)
  private val defaultUri = new URI("file:///")
  private val appNamePattern = "^[a-zA-Z_][a-zA-Z0-9_]+$".r.pattern

  def validApplicationName(appName: String): Boolean = {
    appNamePattern.matcher(appName).matches()
  }

  def getCurrentClassPath: Array[String] = {
    val classpath = System.getProperty("java.class.path")
    val classpathList = classpath.split(File.pathSeparator)
    classpathList
  }

  def version: String = {
    val home = System.getProperty(Constants.GEARPUMP_HOME)
    val version = Try {
      val versionFile = new FileInputStream(new File(home, "VERSION"))
      val reader = new BufferedReader(new InputStreamReader(versionFile))
      val version = reader.readLine().replace("version:=", "")
      versionFile.close()
      version
    }
    version match {
      case Success(version) =>
        version
      case Failure(ex) =>
        LOG.error("failed to read VERSION file, " + ex.getMessage)
        "Unknown-Version"
    }
  }

  def startProcess(options: Array[String], classPath: Array[String], mainClass: String,
      arguments: Array[String]): RichProcess = {
    val java = System.getProperty("java.home") + "/bin/java"

    val command = List(java) ++ options ++
      List("-cp", classPath.mkString(File.pathSeparator), mainClass) ++ arguments
    LOG.info(s"Starting executor process java $mainClass ${arguments.mkString(" ")} " +
      s"\n ${options.mkString(" ")}")
    val logger = new ProcessLogRedirector()
    val process = Process(command).run(logger)
    new RichProcess(process, logger)
  }

  
  def resolveJvmSetting(conf: Config): AppJvmSettings = {

    import org.apache.gearpump.util.Constants._

    val appMasterVMArgs = Try(conf.getString(GEARPUMP_APPMASTER_ARGS).split("\\s+")
      .filter(_.nonEmpty)).toOption
    val executorVMArgs = Try(conf.getString(GEARPUMP_EXECUTOR_ARGS).split("\\s+")
      .filter(_.nonEmpty)).toOption

    val appMasterClassPath = Try(
      conf.getString(GEARPUMP_APPMASTER_EXTRA_CLASSPATH)
        .split("[;:]").filter(_.nonEmpty)).toOption

    val executorClassPath = Try(
      conf.getString(GEARPUMP_EXECUTOR_EXTRA_CLASSPATH)
        .split(File.pathSeparator).filter(_.nonEmpty)).toOption

    AppJvmSettings(
      JvmSetting(appMasterVMArgs.getOrElse(Array.empty[String]),
        appMasterClassPath.getOrElse(Array.empty[String])),
      JvmSetting(executorVMArgs
        .getOrElse(Array.empty[String]), executorClassPath.getOrElse(Array.empty[String])))
  }

  def asSubDirOfGearpumpHome(dir: String): File = {
    new File(System.getProperty(Constants.GEARPUMP_HOME), dir)

  }
} 
Example 47
Source File: CRFFromParsedFile.scala    From CRF-Spark   with Apache License 2.0 5 votes vote down vote up
package com.intel.ssg.bdt.nlp

import java.io.{DataOutputStream, DataInputStream, FileInputStream, FileOutputStream}

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object CRFFromParsedFile {

  def main(args: Array[String]) {
    val templateFile = "src/test/resources/chunking/template"
    val trainFile = "src/test/resources/chunking/serialized/train.data"
    val testFile = "src/test/resources/chunking/serialized/test.data"

    val conf = new SparkConf().setAppName(s"${this.getClass.getSimpleName}")
    val sc = new SparkContext(conf)

    val templates: Array[String] = scala.io.Source.fromFile(templateFile).getLines().filter(_.nonEmpty).toArray
    val trainRDD: RDD[Sequence] = sc.textFile(trainFile).filter(_.nonEmpty).map(Sequence.deSerializer)

    val model: CRFModel = CRF.train(templates, trainRDD, 0.25, 1, 100, 1E-3, "L1")

    val testRDD: RDD[Sequence] = sc.textFile(testFile).filter(_.nonEmpty).map(Sequence.deSerializer)

    
    val results: RDD[Sequence] = model.setNBest(10)
      .setVerboseMode(VerboseLevel1)
      .predict(testRDD)

    val score = results
      .zipWithIndex()
      .map(_.swap)
      .join(testRDD.zipWithIndex().map(_.swap))
      .map(_._2)
      .map(x => x._1.compare(x._2))
      .reduce(_ + _)
    val total = testRDD.map(_.toArray.length).reduce(_ + _)
    println(s"Prediction Accuracy: $score / $total")

    sc.stop()
  }
} 
Example 48
Source File: LagomDevModePropertiesLoader.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.devmode.internal.util

import java.io.File
import java.io.FileInputStream
import java.util.Properties

object PropertiesLoader {
  def from(file: String): Properties = {
    val properties = new Properties()
    // First check if the file is on the classpath
    val is = {
      getClass.getResourceAsStream(file) match {
        case null =>
          // Try and load it as a file
          val f = new File(file)
          if (f.isFile) {
            new FileInputStream(f)
          } else {
            throw new IllegalArgumentException(s"File $file not found as classpath resource or on the filesystem")
          }
        case found => found
      }
    }

    try {
      properties.load(is)
      properties
    } finally {
      is.close()
    }
  }
} 
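A minimal call-site sketch; the resource name and key are placeholders for illustration only:

// Resolves the name first as a classpath resource, then as a file, as implemented above.
val props = PropertiesLoader.from("/placeholder.properties")
val value = Option(props.getProperty("some.key")) // hypothetical key, for illustration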
Example 49
Source File: exercise10.scala    From scala-for-the-Impatient   with MIT License 5 votes vote down vote up
import collection.mutable.ArrayBuffer
import java.io.{ObjectInputStream, FileOutputStream, FileInputStream, ObjectOutputStream}

class Person(var name:String) extends Serializable{

  val friends = new ArrayBuffer[Person]()

  def addFriend(friend : Person){
    friends += friend
  }

  override def toString() = {
    var str = "My name is " + name + " and my friends name is "
    friends.foreach(str += _.name + ",")
    str
  }
}


object Test extends App{
  val p1 = new Person("Ivan")
  val p2 = new Person("F2")
  val p3 = new Person("F3")

  p1.addFriend(p2)
  p1.addFriend(p3)
  println(p1)

  val out = new ObjectOutputStream(new FileOutputStream("person.obj"))
  out.writeObject(p1)
  out.close()

  val in =  new ObjectInputStream(new FileInputStream("person.obj"))
  val p = in.readObject().asInstanceOf[Person]
  println(p)
} 
Example 50
Source File: exercise08.scala    From scala-for-the-Impatient   with MIT License 5 votes vote down vote up
import java.io.{InputStream, FileInputStream}


trait Buffering {
  this: InputStream =>

  val BUF_SIZE: Int = 5
  private val buf = new Array[Byte](BUF_SIZE)
  private var bufsize: Int = 0
  private var pos: Int = 0

  override def read(): Int = {
    if (pos >= bufsize) {
      bufsize = this.read(buf, 0, BUF_SIZE)
      if (bufsize <= 0) return -1 // end of stream: nothing left to buffer
      pos = 0
    }
    pos += 1
    buf(pos - 1) & 0xff // return the byte as an unsigned value, per the InputStream contract
  }
}

val f = new FileInputStream("exercise08.txt") with Buffering

for(i <- 1 to 10) println(f.read()) 
Example 51
Source File: exercise09.scala    From scala-for-the-Impatient   with MIT License 5 votes vote down vote up
import java.io.{InputStream, FileInputStream}

trait Logger {
  def log(msg: String)
}

trait NoneLogger extends Logger {
  def log(msg: String) = {}
}

trait PrintLogger extends Logger {
  def log(msg: String) = println(msg)
}


trait Buffering {
  this: InputStream with Logger =>

  val BUF_SIZE: Int = 5
  private val buf = new Array[Byte](BUF_SIZE)
  private var bufsize: Int = 0
  private var pos: Int = 0

  override def read(): Int = {
    if (pos >= bufsize) {
      bufsize = this.read(buf, 0, BUF_SIZE)
      log("buffered %d bytes: %s".format(bufsize, buf.mkString(", ")))
      if (bufsize <= 0) return -1 // end of stream: nothing left to buffer
      pos = 0
    }
    pos += 1
    buf(pos - 1) & 0xff // return the byte as an unsigned value, per the InputStream contract
  }
}

val f = new FileInputStream("exercise08.txt") with Buffering with PrintLogger

for(i <- 1 to 10) println(f.read()) 
Example 52
Source File: FileUtils.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.wmexchanger.utils

import java.io.BufferedInputStream
import java.io.BufferedOutputStream
import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.FilenameFilter
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import java.io.PrintWriter

import org.clulab.wm.wmexchanger.utils.Closer.AutoCloser

import scala.io.Source

object FileUtils {

  def appendingPrintWriterFromFile(file: File): PrintWriter = Sinker.printWriterFromFile(file, append = true)

  def appendingPrintWriterFromFile(path: String): PrintWriter = Sinker.printWriterFromFile(path, append = true)

  def printWriterFromFile(file: File): PrintWriter = Sinker.printWriterFromFile(file, append = false)

  def printWriterFromFile(path: String): PrintWriter = Sinker.printWriterFromFile(path, append = false)

  // Output
  def newBufferedOutputStream(file: File): BufferedOutputStream =
    new BufferedOutputStream(new FileOutputStream(file))

  def newBufferedOutputStream(filename: String): BufferedOutputStream =
    newBufferedOutputStream(new File(filename))

  def newAppendingBufferedOutputStream(file: File): BufferedOutputStream =
    new BufferedOutputStream(new FileOutputStream(file, true))

  def newAppendingBufferedOutputStream(filename: String): BufferedOutputStream =
    newAppendingBufferedOutputStream(new File(filename))

  def newObjectOutputStream(filename: String): ObjectOutputStream =
    new ObjectOutputStream(newBufferedOutputStream(filename))

  // Input
  def newBufferedInputStream(file: File): BufferedInputStream =
    new BufferedInputStream(new FileInputStream(file))

  def newBufferedInputStream(filename: String): BufferedInputStream =
    newBufferedInputStream(new File(filename))

  def newObjectInputStream(filename: String): ObjectInputStream =
    new ObjectInputStream(newBufferedInputStream(filename))

  def findFiles(collectionDir: String, extension: String): Seq[File] = {
    val dir = new File(collectionDir)
    val filter = new FilenameFilter {
      def accept(dir: File, name: String): Boolean = name.endsWith(extension)
    }

    val result = Option(dir.listFiles(filter))
        .getOrElse(throw Sourcer.newFileNotFoundException(collectionDir))
    result
  }

  protected def getTextFromSource(source: Source): String = source.mkString

  def getTextFromFile(file: File): String =
    Sourcer.sourceFromFile(file).autoClose { source =>
      getTextFromSource(source)
    }
} 
Example 53
Source File: MetricsConfig.scala    From SparkCore   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.metrics

import java.io.{FileInputStream, InputStream}
import java.util.Properties

import scala.collection.mutable
import scala.util.matching.Regex

import org.apache.spark.Logging
import org.apache.spark.util.Utils

private[spark] class MetricsConfig(val configFile: Option[String]) extends Logging {

  private val DEFAULT_PREFIX = "*"
  private val INSTANCE_REGEX = "^(\\*|[a-zA-Z]+)\\.(.+)".r
  private val DEFAULT_METRICS_CONF_FILENAME = "metrics.properties"

  private[metrics] val properties = new Properties()
  private[metrics] var propertyCategories: mutable.HashMap[String, Properties] = null

  private def setDefaultProperties(prop: Properties) {
    prop.setProperty("*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet")
    prop.setProperty("*.sink.servlet.path", "/metrics/json")
    prop.setProperty("master.sink.servlet.path", "/metrics/master/json")
    prop.setProperty("applications.sink.servlet.path", "/metrics/applications/json")
  }

  def initialize() {
    // Add default properties in case there's no properties file
    setDefaultProperties(properties)

    // If spark.metrics.conf is not set, try to get file in class path
    val isOpt: Option[InputStream] = configFile.map(new FileInputStream(_)).orElse {
      try {
        Option(Utils.getSparkClassLoader.getResourceAsStream(DEFAULT_METRICS_CONF_FILENAME))
      } catch {
        case e: Exception =>
          logError("Error loading default configuration file", e)
          None
      }
    }

    isOpt.foreach { is =>
      try {
        properties.load(is)
      } finally {
        is.close()
      }
    }

    propertyCategories = subProperties(properties, INSTANCE_REGEX)
    if (propertyCategories.contains(DEFAULT_PREFIX)) {
      import scala.collection.JavaConversions._

      val defaultProperty = propertyCategories(DEFAULT_PREFIX)
      for { (inst, prop) <- propertyCategories
            if (inst != DEFAULT_PREFIX)
            (k, v) <- defaultProperty
            if (prop.getProperty(k) == null) } {
        prop.setProperty(k, v)
      }
    }
  }

  def subProperties(prop: Properties, regex: Regex): mutable.HashMap[String, Properties] = {
    val subProperties = new mutable.HashMap[String, Properties]
    import scala.collection.JavaConversions._
    prop.foreach { kv =>
      if (regex.findPrefixOf(kv._1).isDefined) {
        val regex(prefix, suffix) = kv._1
        subProperties.getOrElseUpdate(prefix, new Properties).setProperty(suffix, kv._2)
      }
    }
    subProperties
  }

  def getInstance(inst: String): Properties = {
    propertyCategories.get(inst) match {
      case Some(s) => s
      case None => propertyCategories.getOrElse(DEFAULT_PREFIX, new Properties)
    }
  }
} 
Example 54
Source File: PropertiesConfig.scala    From DynaML   with Apache License 2.0 5 votes vote down vote up
package io.github.mandar2812.dynaml.utils.sumac

import collection._
import java.util.Properties
import java.io.{FileOutputStream, File, FileInputStream, BufferedInputStream}

import collection.JavaConverters._


trait PropertiesConfig extends ExternalConfig {
  self: Args =>

  var propertyFile: File = _

  abstract override def readArgs(originalArgs: Map[String,String]): Map[String,String] = {
    parse(originalArgs, false)

    val props = new Properties()
    if (propertyFile != null) {
      val in = new BufferedInputStream(new FileInputStream(propertyFile))
      props.load(in)
      in.close()
    }
    //append args we read from the property file to the args from the command line, and pass to next trait
    super.readArgs(ExternalConfigUtil.mapWithDefaults(originalArgs,props.asScala))
  }

  abstract override def saveConfig() {
    PropertiesConfig.saveConfig(this, propertyFile)
    super.saveConfig()
  }

}

object PropertiesConfig {
  def saveConfig(args: Args, propertyFile: File) {
    val props = new Properties()
    args.getStringValues.foreach{case(k,v) => props.put(k,v)}
    val out = new FileOutputStream(propertyFile)
    props.store(out, "")
    out.close()
  }
} 
Example 55
Source File: PreprocessSusy.scala    From DynaML   with Apache License 2.0 5 votes vote down vote up
package io.github.mandar2812.dynaml.examples

import java.io.{BufferedReader, FileInputStream, InputStreamReader}
import java.util.zip.GZIPInputStream

import com.github.tototoshi.csv.CSVWriter

import scala.util.Random

case class BufferedReaderIterator(reader: BufferedReader) extends Iterator[String] {
  override def hasNext() = reader.ready
  override def next() = reader.readLine()
}

object GzFileIterator {
  def apply(file: java.io.File, encoding: String): BufferedReader = {
    new BufferedReader(
      new InputStreamReader(
        new GZIPInputStream(
          new FileInputStream(file)), encoding))
  }
}

object PreprocessSusy {
  def apply(args: String = "") = {
    val iterator:BufferedReader = GzFileIterator(new java.io.File(args+"SUSY.csv.gz"),
      "US-ASCII")
    var line = iterator.readLine()
    val writer = CSVWriter.open(args+"susy.csv")
    val writert = CSVWriter.open(args+"susytest.csv")

    println("Outputting train and test csv files ...")
    while(line != null && line != "\n") {

      val row = line.split(',').reverse
      val procrow = Array.tabulate(row.length)((i) => {
        if(i == row.length-1) {
          val label = if(row(i).toDouble == 1.0) row(i).toDouble else -1.0
          label.toString
        } else {
          row(i)
        }
      })

      if(Random.nextDouble() <= 0.9)
      {
        writer.writeRow(procrow)
      } else {
        writert.writeRow(procrow)
      }
      line = iterator.readLine()
    }
    writer.close()
    writert.close()
    println("Done ...")
  }
} 
Example 56
Source File: PreprocessForestCover.scala    From DynaML   with Apache License 2.0 5 votes vote down vote up
package io.github.mandar2812.dynaml.examples

import java.io.{BufferedReader, FileInputStream, InputStreamReader}

import com.github.tototoshi.csv.CSVWriter

import scala.util.Random

object FileIterator {
  def apply(file: java.io.File, encoding: String): BufferedReader = {
    new BufferedReader(
      new InputStreamReader(
        new FileInputStream(file), encoding))
  }
}

object PreprocessForestCover {
  def apply(args: String = "") = {
    val iterator:BufferedReader = FileIterator(new java.io.File(args+"covtype.data"),
      "US-ASCII")
    var line = iterator.readLine()
    val writer = CSVWriter.open(args+"cover.csv")
    val writert = CSVWriter.open(args+"covertest.csv")

    println("Outputting train and test csv files ...")
    while(line != null) {

      val row = line.split(',')
      val procrow = Array.tabulate(row.length)((i) => {
        if(i == row.length-1) {
          val label = if(row(i).toDouble == 2.0) 1.0 else -1.0
          label.toString
        } else {
          row(i)
        }
      })

      if(Random.nextDouble() <= 0.9)
      {
        writer.writeRow(procrow)
      } else {
        writert.writeRow(procrow)
      }
      line = iterator.readLine()
    }
    writer.close()
    writert.close()
    println("Done ...")
  }
} 
Example 57
Source File: Zip$Test.scala    From mystem-scala   with MIT License 5 votes vote down vote up
package ru.stachek66.tools

import java.io.{File, FileInputStream}

import org.apache.commons.io.IOUtils
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner

import org.junit.runner.RunWith


class Zip$Test extends FunSuite {

  test("zip-test") {
    val src = new File("src/test/resources/test.txt")
    Zip.unpack(
      new File("src/test/resources/test.zip"),
      new File("src/test/resources/res.txt")) match {
      case f =>
        val content0 = IOUtils.toString(new FileInputStream(f))
        val content1 = IOUtils.toString(new FileInputStream(src))
        print(content0.trim + " vs " + content1.trim)
        assert(content0 === content1)
    }
  }

} 
Example 58
Source File: TarGz$Test.scala    From mystem-scala   with MIT License 5 votes vote down vote up
package ru.stachek66.tools

import java.io.{File, FileInputStream}

import org.apache.commons.io.IOUtils
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner


class TarGz$Test extends FunSuite {

  test("tgz-test") {
    val src = new File("src/test/resources/test.txt")
    TarGz.unpack(
      new File("src/test/resources/test.tar.gz"),
      new File("src/test/resources/res.txt")) match {
      case f =>
        val content0 = IOUtils.toString(new FileInputStream(f))
        val content1 = IOUtils.toString(new FileInputStream(src))
        print(content0.trim + " vs " + content1.trim)
        assert(content0 === content1)
    }
  }
} 
Example 59
Source File: FromFileInput.scala    From borer   with Mozilla Public License 2.0 5 votes vote down vote up
package io.bullet.borer.input

import java.io.{File, FileInputStream}
import java.nio.file.Files

import io.bullet.borer.{ByteAccess, Input}

trait FromFileInput { this: FromByteArrayInput with FromInputStreamInput =>

  implicit object FromFileProvider extends Input.Provider[File] {
    type Bytes = Array[Byte]
    def byteAccess         = ByteAccess.ForByteArray
    def apply(value: File) = fromFile(value)
  }

  def fromFile(file: File, bufferSize: Int = 16384): Input[Array[Byte]] = {
    if (bufferSize < 256) throw new IllegalArgumentException(s"bufferSize must be >= 256 but was $bufferSize")
    val fileSize = file.length()
    if (fileSize > bufferSize) fromInputStream(new FileInputStream(file), bufferSize)
    else fromByteArray(Files.readAllBytes(file.toPath))
  }

} 
Example 60
Source File: services.scala    From InteractiveGraph-neo4j   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package org.grapheco.server.pidb

import java.io.{File, FileInputStream}

import org.apache.commons.io.{FileUtils, IOUtils}
import org.grapheco.server.util.{JsonUtils, Logging, ServletContextUtils}
import org.neo4j.driver.v1._
import org.neo4j.graphdb.factory.{GraphDatabaseFactory, GraphDatabaseSettings}
import org.neo4j.graphdb.{GraphDatabaseService, Label, RelationshipType}
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.beans.factory.{DisposableBean, InitializingBean}
import cn.pidb.engine.{BoltService, CypherService, PidbConnector}

import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.reflect.ClassTag



class PidbService(boltUrl:String, boltUser:String, boltPassword:String) extends BoltService(boltUrl, boltUser, boltPassword){


  def getRelativeOrAbsoluteFile(path: String) = {
    Some(new File(path)).map { file =>
      if (file.isAbsolute) {
        file
      }
      else {
        new File(ServletContextUtils.getServletContext.getRealPath(s"/${path}"))
      }
    }.get
  }
} 
Example 61
Source File: index_ceeaus_all.scala    From attic-nlp4l   with Apache License 2.0 5 votes vote down vote up
import java.io.File
import java.io.FileInputStream
import java.io.InputStreamReader
import java.io.BufferedReader
import java.nio.file.FileSystems
import org.apache.lucene.index._
import org.apache.lucene.search.TermQuery
import org.nlp4l.core.analysis.Analyzer
import org.nlp4l.core.analysis.AnalyzerBuilder
import org.nlp4l.core._

import scalax.file.Path
import scalax.file.PathSet

val index = "/tmp/index-ceeaus-all"

def lines(fl: Path, encoding: String): List[String] = {
  val is = new FileInputStream(fl.path)
  val r = new InputStreamReader(is, encoding)
  val br = new BufferedReader(r)
  var result: List[String] = Nil

  try{
    var line = br.readLine()
    while(line != null){
      result = result :+ line
      line = br.readLine()
    }
    result
  }
  finally{
    br.close
    r.close
    is.close
  }
}

def document(fl: Path, ja: Boolean): Document = {
  val ps: Array[String] = fl.path.split(File.separator)
  // for Windows
  // val ps: Array[String] = fl.path.split("\\\\")
  val file = ps(3)
  val typ = ps(2)
  val cat = "all"
  val encoding = if(ja) "sjis" else "UTF-8"
  val body = lines(fl, encoding)
  val body_set = if(ja) Set(Field("body_ja", body)) else Set(Field("body_en", body), Field("body_ws", body))
  Document(Set(
    Field("file", file), Field("type", typ), Field("cat", cat)) ++ body_set
  )
}

// delete existing Lucene index
val p = Path(new File(index))
p.deleteRecursively()

// write documents into an index
val schema = SchemaLoader.loadFile("examples/schema/ceeaus.conf")
val writer = IWriter(index, schema)

val c: PathSet[Path] = Path("corpora", "CEEAUS", "PLAIN").children()
// write English docs
c.filter(e => e.name.indexOf("cjejus")<0 && e.name.endsWith(".txt")).toList.sorted.foreach(g => writer.write(document(g, false)))
// write Japanese docs
c.filter(e => e.name.indexOf("cjejus")>=0 && e.name.endsWith(".txt")).toList.sorted.foreach(g => writer.write(document(g, true)))
writer.close

// search test
val searcher = ISearcher(index)
val results = searcher.search(query=new TermQuery(new Term("body_ja", "喫煙")), rows=10)

results.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("file"))
})

// search test for ch4
val results2 = searcher.search(query=new TermQuery(new Term("body_ws", "still,")), rows=10)

results2.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("file"))
}) 
Example 62
Source File: index_ceeaus.scala    From attic-nlp4l   with Apache License 2.0 5 votes vote down vote up
import java.io.File
import java.io.FileInputStream
import java.io.InputStreamReader
import java.io.BufferedReader
import java.nio.file.FileSystems
import org.apache.lucene.index._
import org.apache.lucene.search.TermQuery
import org.nlp4l.core.analysis.Analyzer
import org.nlp4l.core._

import scalax.file.Path
import scalax.file.PathSet

val index = "/tmp/index-ceeaus"

def lines(fl: Path, encoding: String): List[String] = {
  val is = new FileInputStream(fl.path)
  val r = new InputStreamReader(is, encoding)
  val br = new BufferedReader(r)
  var result: List[String] = Nil

  try{
    var line = br.readLine()
    while(line != null){
      result = result :+ line
      line = br.readLine()
    }
    result
  }
  finally{
    br.close
    r.close
    is.close
  }
}

def document(fl: Path, ja: Boolean): Document = {
  val ps: Array[String] = fl.path.split(File.separator)
  // for Windows
  // val ps: Array[String] = fl.path.split("\\\\")
  val file = ps(3)
  val typ = ps(2)
  val cat = if(file.indexOf("smk") >= 0) "smk" else "ptj"   // smoking or part time job
  val encoding = if(ja) "sjis" else "UTF-8"
  val body = lines(fl, encoding)
  Document(Set(
    Field("file", file), Field("type", typ), Field("cat", cat),
    Field(if(ja) "body_ja" else "body_en", body)
  ))
}

// delete existing Lucene index
val p = Path(new File(index))
p.deleteRecursively()

// write documents into an index
val schema = SchemaLoader.loadFile("examples/schema/ceeaus.conf")
val writer = IWriter(index, schema)

val c: PathSet[Path] = Path("corpora", "CEEAUS").children()
// write English docs
c.toList.sorted.filter(e => e.name.indexOf("CJEJUS")<0 && e.name.indexOf("PLAIN")<0).foreach( f =>
  f.children().toList.sorted.filter( g => g.name.indexOf("(1)") < 0 && g.name.endsWith(".txt")).foreach(h => writer.write(document(h, false)))
)
// write Japanese docs
c.toList.sorted.filter(e => e.name.indexOf("CJEJUS")>=0).foreach( f =>
  f.children().toList.sorted.filter( g => g.name.indexOf("(1)") < 0 && g.name.endsWith(".txt")).foreach(h => writer.write(document(h, true)))
)
writer.close

// search
val searcher = ISearcher(index)
val results = searcher.search(query=new TermQuery(new Term("body_ja", "喫煙")), rows=10)

results.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("file"))
}) 
Example 63
Source File: BytecodeUtil.scala    From sbt-jni   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.jodersky.sbt.jni
package util

import java.io.{ File, FileInputStream, Closeable }
import scala.collection.mutable.{ HashSet }

import org.objectweb.asm.{ ClassReader, ClassVisitor, MethodVisitor, Opcodes }

object BytecodeUtil {

  private class NativeFinder extends ClassVisitor(Opcodes.ASM5) {

    // classes found to contain at least one @native def
    val _nativeClasses = new HashSet[String]
    def nativeClasses = _nativeClasses.toSet

    private var fullyQualifiedName: String = ""

    override def visit(version: Int, access: Int, name: String, signature: String,
      superName: String, interfaces: Array[String]): Unit = {
      fullyQualifiedName = name.replaceAll("/", ".")
    }

    override def visitMethod(access: Int, name: String, desc: String,
      signature: String, exceptions: Array[String]): MethodVisitor = {

      val isNative = (access & Opcodes.ACC_NATIVE) != 0

      if (isNative) {
        _nativeClasses += fullyQualifiedName
      }

      null //return null, do not visit method further
    }

  }

  private def using[A >: Null <: Closeable, R](mkStream: => A)(action: A => R): R = {
    var stream: A = null
    try {
      stream = mkStream
      action(stream)
    } finally {
      if (stream != null) {
        stream.close()
      }
    }
  }

  
  def nativeClasses(classFile: File): Set[String] = using(new FileInputStream(classFile)) { in =>
    val reader = new ClassReader(in)
    val finder = new NativeFinder
    reader.accept(finder, 0)
    finder.nativeClasses
  }

} 
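A small sketch of how the helper could be invoked from a build task; the class-file path is a placeholder:

// Scan a single compiled class file for methods declared as @native.
val natives: Set[String] = BytecodeUtil.nativeClasses(new File("target/classes/org/example/NativeOps.class"))
natives.foreach(println)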
Example 64
Source File: UsesSslContext.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package redis

import java.io.FileInputStream
import java.security.{KeyStore, SecureRandom}

import javax.net.ssl.{KeyManagerFactory, SSLContext, SSLEngine, TrustManagerFactory}

trait UsesSslContext {
  lazy val sslContext: SSLContext = SSLContext.getInstance("TLSv1.2").setup { sslc =>
    val ks = KeyStore.getInstance("PKCS12")
    ks.load(new FileInputStream("./tls/redis.p12"), Array.empty)

    val kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
    kmf.init(ks, Array.empty)

    val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
    tmf.init(ks)

    sslc.init(kmf.getKeyManagers, tmf.getTrustManagers, new SecureRandom)
  }
} 
Example 65
Source File: Compression.scala    From databus-maven-plugin   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.dbpedia.databus.lib

import better.files._
import java.io.{BufferedInputStream, FileInputStream, InputStream}
import com.codahale.metrics.MetricRegistry
import org.apache.commons.compress.archivers.{ArchiveEntry, ArchiveException, ArchiveInputStream, ArchiveStreamFactory}
import org.apache.commons.compress.compressors.{CompressorException, CompressorInputStream, CompressorStreamFactory}

import scala.util.Try

object Compression {

  def detectCompression(datafile: File): Option[String] = {
    try {
      Some(datafile.inputStream.map(_.buffered).apply(CompressorStreamFactory.detect))
    } catch {
      case ce: CompressorException => None
    }
  }

  def detectArchive(datafile: File): Option[String] = {
    try {
      Some(datafile.inputStream.map(_.buffered).apply(ArchiveStreamFactory.detect))
    } catch {
      case ce: ArchiveException => None
    }
  }
} 
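A small usage sketch, assuming an input file at the illustrative path below; both helpers return None when the stream format is not recognised.

import better.files.File
import org.dbpedia.databus.lib.Compression

object DetectFormat extends App {
  // Illustrative file name; replace with a real dump file.
  val datafile: File = File("data/instance-types.ttl.bz2")
  println(s"compression: ${Compression.detectCompression(datafile)}") // e.g. Some("bzip2")
  println(s"archive:     ${Compression.detectArchive(datafile)}")     // None for a plain compressed file
}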
Example 66
Source File: scrimage.scala    From scastie   with Apache License 2.0 5 votes vote down vote up
// scrimage-core, scrimage-filters

import com.sksamuel.scrimage._, filter._
import java.io.{File, FileInputStream}
import java.net.URL
import java.nio.file.{Files, Paths}
import scala.util.Try

// Download image to cache
val dest = Paths.get("/tmp/scastie/lanzarote.jpg")
if (!Files.exists(dest)) {
  Files.createDirectories(dest.getParent)
  val url = new URL("https://github.com/sksamuel/scrimage/blob/master/scrimage-core/src/test/resources/lanzarote.jpg?raw=true")
  Try(url.openStream()).foreach(src => Files.copy(src, dest))
}
val image = Image.fromStream(new FileInputStream(new File("/tmp/scastie/lanzarote.jpg")))
val small = image.scaleToWidth(200)


toBase64(small)

toBase64(small.filter(SepiaFilter)) 
Example 67
Source File: Conf.scala    From CkoocNLP   with Apache License 2.0 5 votes vote down vote up
package config

import java.io.{File, FileInputStream, InputStreamReader}
import java.util.Properties

import scala.collection.mutable


object Conf {
  def loadConf(filePath: String): mutable.LinkedHashMap[String, String] = {
    val kvMap = mutable.LinkedHashMap[String, String]()

    val properties = new Properties()
    properties.load(new InputStreamReader(new FileInputStream(filePath), "UTF-8"))
    val propertyNameArray = properties.stringPropertyNames().toArray(new Array[String](0))

    val fileName = new File(filePath).getName

    println(s"============ Loading configuration file $fileName ================")
    for (propertyName <- propertyNameArray) {
      val property = properties.getProperty(propertyName).replaceAll("\"", "").trim
      println(propertyName + ": " + property)
      kvMap.put(propertyName, property)
    }
    println("==========================================================")

    kvMap
  }
} 
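A usage sketch, assuming the enclosing object is config.Conf as above; it writes a temporary properties file so the call is self-contained, and all names are illustrative.

import java.nio.charset.StandardCharsets
import java.nio.file.Files

object ConfDemo extends App {
  // Create a tiny properties file so the example can run on its own.
  val tmp = Files.createTempFile("demo", ".properties")
  Files.write(tmp, "spark.master=local[2]\nspark.app.name=\"demo\"\n".getBytes(StandardCharsets.UTF_8))

  val kvMap = config.Conf.loadConf(tmp.toString)
  kvMap.foreach { case (k, v) => println(s"$k -> $v") } // insertion order is preserved by LinkedHashMap
}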
Example 68
Source File: IoTest.scala    From fgbio   with MIT License 5 votes vote down vote up
package com.fulcrumgenomics.util

import java.io.{BufferedInputStream, BufferedReader, FileInputStream, InputStreamReader}
import java.util.zip.GZIPInputStream

import com.fulcrumgenomics.testing.UnitSpec
import htsjdk.samtools.util.BlockCompressedInputStream


class IoTest extends UnitSpec {

  Seq(".bgz", ".bgzip").foreach { ext =>
    it should s"round trip data to a bgzipped file with extension ${ext}" in {
      val text = "This is a stupid little text fragment for compression. Yay compression!"
      val data = Seq.fill(10)(text)
      val f = makeTempFile("test.", ext)
      Io.writeLines(f, data)

      val stream = new BufferedInputStream(new FileInputStream(f.toFile))
      BlockCompressedInputStream.isValidFile(stream) shouldBe true
      val reread = Io.readLines(f).toIndexedSeq

      reread shouldBe data
    }
  }
} 
Example 69
Source File: ZipUtil.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
import java.io.{ByteArrayOutputStream, File, FileInputStream, FileOutputStream, InputStream}

object ZipUtil {

  def addToZip(sourceZip: File, destZip: File, extra: Seq[(String, Array[Byte])]): Unit = {
    
    val is = new FileInputStream(sourceZip)
    val os = new FileOutputStream(destZip)
    val bootstrapZip = new ZipInputStream(is)
    val outputZip = new ZipOutputStream(os)

    def readFullySync(is: InputStream) = {
      val buffer = new ByteArrayOutputStream
      val data = Array.ofDim[Byte](16384)

      var nRead = is.read(data, 0, data.length)
      while (nRead != -1) {
        buffer.write(data, 0, nRead)
        nRead = is.read(data, 0, data.length)
      }

      buffer.flush()
      buffer.toByteArray
    }

    def zipEntries(zipStream: ZipInputStream): Iterator[(ZipEntry, Array[Byte])] =
      new Iterator[(ZipEntry, Array[Byte])] {
        private var nextEntry = Option.empty[ZipEntry]
        private def update() =
          nextEntry = Option(zipStream.getNextEntry)

        update()

        def hasNext = nextEntry.nonEmpty
        def next() = {
          val ent = nextEntry.get
          val data = readFullySync(zipStream)

          update()

          (ent, data)
        }
      }

    val extraNames = extra.map(_._1).toSet

    for ((ent, data) <- zipEntries(bootstrapZip) if !extraNames(ent.getName)) {

      // Same workaround as https://github.com/spring-projects/spring-boot/issues/13720
      // (https://github.com/spring-projects/spring-boot/commit/a50646b7cc3ad941e748dfb450077e3a73706205#diff-2ff64cd06c0b25857e3e0dfdb6733174R144)
      ent.setCompressedSize(-1L)

      outputZip.putNextEntry(ent)
      outputZip.write(data)
      outputZip.closeEntry()
    }

    for ((dest, data) <- extra) {
      outputZip.putNextEntry(new ZipEntry(dest))
      outputZip.write(data)
      outputZip.closeEntry()
    }

    outputZip.close()

    is.close()
    os.close()

  }

} 
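A usage sketch; the jar paths and the extra entry are illustrative. The extra entries are appended after every entry copied from the source jar.

import java.io.File
import java.nio.charset.StandardCharsets

object AddExtraEntry extends App {
  val source = new File("app.jar")          // existing jar (illustrative path)
  val dest   = new File("app-patched.jar")  // will be (over)written
  val extra  = Seq(
    "META-INF/build.properties" -> "version=0.1.0\n".getBytes(StandardCharsets.UTF_8)
  )
  ZipUtil.addToZip(source, dest, extra)
}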
Example 70
Source File: ArtifactsLock.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.install

import java.io.{File, FileInputStream}
import java.math.BigInteger
import java.security.MessageDigest

import cats.implicits._
import coursier.cache.internal.FileUtil
import coursier.util.Artifact
import dataclass.data

@data class ArtifactsLock (
  entries: Set[ArtifactsLock.Entry]
) {
  def repr: String =
    entries
      .toVector
      .map { e =>
        s"${e.url}#${e.checksumType}:${e.checksum}"
      }
      .sorted
      .mkString("\n")
}

object ArtifactsLock {

  @data class Entry(
    url: String,
    checksumType: String,
    checksum: String
  )

  def read(input: String): Either[String, ArtifactsLock] =
    input
      .split('\n')
      .map(_.trim)
      .zipWithIndex
      .filter(_._1.nonEmpty)
      .toList
      .traverse {
        case (line, lineNum) =>
          val idx = line.indexOf('#')
          if (idx < 0)
            Left(s"Malformed line ${lineNum + 1}")
          else {
            val url = line.take(idx)
            val checksumPart = line.drop(idx + 1)
            val idx0 = checksumPart.indexOf(':')
            if (idx0 < 0)
              Left(s"Malformed line ${lineNum + 1}")
            else {
              val checksumType = checksumPart.take(idx0)
              val checksum = checksumPart.drop(idx0 + 1)
              Right(Entry(url, checksumType, checksum))
            }
          }
      }
      .map { entries =>
        ArtifactsLock(entries.toSet)
      }

  private def sha1(f: File): String = {
    val md = MessageDigest.getInstance("SHA-1")

    var is: FileInputStream = null
    try {
      is = new FileInputStream(f)
      FileUtil.withContent(is, new FileUtil.UpdateDigest(md))
    } finally if (is != null) is.close()

    val b = md.digest()
    new BigInteger(1, b).toString(16)
  }

  def ofArtifacts(artifacts: Seq[(Artifact, File)]): ArtifactsLock = {

    val entries = artifacts.map {
      case (a, f) =>
        Entry(a.url, "SHA-1", sha1(f))
    }

    ArtifactsLock(entries.toSet)
  }
} 
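A round-trip sketch for the text format that `read` expects (one `url#checksumType:checksum` entry per line); the URL and checksum below are placeholders.

import coursier.install.ArtifactsLock

object LockRoundTrip extends App {
  val input =
    "https://repo.example.com/libs/foo-1.0.jar#SHA-1:0000000000000000000000000000000000000000"

  ArtifactsLock.read(input) match {
    case Right(lock) => println(lock.repr)                      // re-serialised, sorted form
    case Left(error) => println(s"malformed lock file: $error")
  }
}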
Example 71
Source File: package.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.test

import java.io.{File, FileInputStream}
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, Paths}

import coursier.cache.MockCache
import coursier.core.Repository
import coursier.paths.Util
import coursier.util.{Sync, Task}
import coursier.Platform

import scala.concurrent.{ExecutionContext, Future}

package object compatibility {

  private val pool = Sync.fixedThreadPool(6)
  implicit val executionContext = scala.concurrent.ExecutionContext.fromExecutorService(pool)

  def textResource(path: String)(implicit ec: ExecutionContext): Future[String] = Future {
    val f = new File("modules/tests/shared/src/test/resources/" + path)
    var is0: FileInputStream = null
    try {
      is0 = new FileInputStream(f)
      new String(Platform.readFullySync(is0), UTF_8)
    } finally {
      if (is0 != null)
        is0.close()
    }
  }

  private val baseRepo = {
    val dir = Paths.get("modules/tests/metadata")
    assert(Files.isDirectory(dir))
    dir
  }

  private val fillChunks = Option(System.getenv("FETCH_MOCK_DATA"))
    .exists(s => s == "1" || s == "true")

  def artifact[F[_]: Sync]: Repository.Fetch[F] =
    MockCache.create[F](baseRepo, writeMissing = fillChunks, pool = pool).fetch

  val taskArtifact = artifact[Task]

  private lazy val baseResources = {
    val dir = Paths.get("modules/tests/shared/src/test/resources")
    assert(Files.isDirectory(dir))
    dir
  }

  def tryCreate(path: String, content: String): Unit =
    if (fillChunks) {
      val path0 = baseResources.resolve(path)
      Util.createDirectories(path0.getParent)
      Files.write(path0, content.getBytes(UTF_8))
    }

} 
Example 72
Source File: FileCredentials.scala    From coursier   with Apache License 2.0 5 votes vote down vote up
package coursier.credentials

import java.io.{File, FileInputStream, StringReader}
import java.nio.charset.Charset
import java.nio.file.{Files, Paths}
import java.util.Properties

import dataclass.data

import scala.collection.JavaConverters._

@data class FileCredentials(
  path: String,
  optional: Boolean = true
) extends Credentials {

  def get(): Seq[DirectCredentials] = {

    val f = Paths.get(path)

    if (Files.isRegularFile(f)) {
      val content = new String(Files.readAllBytes(f), Charset.defaultCharset())
      FileCredentials.parse(content, path)
    } else if (optional)
      Nil
    else
      throw new Exception(s"Credential file $path not found")
  }
}

object FileCredentials {

  def parse(content: String, origin: String): Seq[DirectCredentials] = {

    val props = new Properties
    props.load(new StringReader(content))

    val userProps = props
      .propertyNames()
      .asScala
      .map(_.asInstanceOf[String])
      .filter(_.endsWith(".username"))
      .toVector

    userProps.map { userProp =>
      val prefix = userProp.stripSuffix(".username")

      val user = props.getProperty(userProp)
      val password = Option(props.getProperty(s"$prefix.password")).getOrElse {
        throw new Exception(s"Property $prefix.password not found in $origin")
      }

      val host = Option(props.getProperty(s"$prefix.host")).getOrElse {
        throw new Exception(s"Property $prefix.host not found in $origin")
      }

      val realmOpt = Option(props.getProperty(s"$prefix.realm")) // filter if empty?

      val matchHost = Option(props.getProperty(s"$prefix.auto")).fold(DirectCredentials.defaultMatchHost)(_.toBoolean)
      val httpsOnly = Option(props.getProperty(s"$prefix.https-only")).fold(DirectCredentials.defaultHttpsOnly)(_.toBoolean)
      val passOnRedirect = Option(props.getProperty(s"$prefix.pass-on-redirect")).fold(false)(_.toBoolean)

      DirectCredentials(host, user, password)
        .withRealm(realmOpt)
        .withMatchHost(matchHost)
        .withHttpsOnly(httpsOnly)
        .withPassOnRedirect(passOnRedirect)
    }
  }

} 
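A sketch of the property layout that `parse` expects; the host and credentials are placeholders.

import coursier.credentials.FileCredentials

object ParseCredentials extends App {
  val content =
    """example.username=alice
      |example.password=s3cret
      |example.host=artifacts.example.com
      |example.https-only=true
      |""".stripMargin

  val credentials = FileCredentials.parse(content, origin = "inline example")
  credentials.foreach(println)
}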
Example 73
Source File: WordEmbeddingsLoader.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.embeddings

import java.io.{BufferedInputStream, ByteArrayOutputStream, DataInputStream, FileInputStream}

import com.johnsnowlabs.storage.RocksDBConnection
import org.slf4j.LoggerFactory

import scala.io.Source

object WordEmbeddingsTextIndexer {

  def index(
             source: Iterator[String],
             writer: WordEmbeddingsWriter
           ): Unit = {
    try {
      for (line <- source) {
        val items = line.split(" ")
        val word = items(0)
        val embeddings = items.drop(1).map(i => i.toFloat)
        writer.add(word, embeddings)
      }
    } finally {
      writer.close()
    }
  }

  def index(
             source: String,
             writer: WordEmbeddingsWriter
           ): Unit = {
    val sourceFile = Source.fromFile(source)("UTF-8")
    val lines = sourceFile.getLines()
    index(lines, writer)
    sourceFile.close()
  }
}


object WordEmbeddingsBinaryIndexer {

  private val logger = LoggerFactory.getLogger("WordEmbeddings")

  def index(
             source: DataInputStream,
             writer: WordEmbeddingsWriter): Unit = {

    try {
      // File Header
      val numWords = Integer.parseInt(readString(source))
      val vecSize = Integer.parseInt(readString(source))

      // File Body
      for (i <- 0 until numWords) {
        val word = readString(source)

        // Unit Vector
        val vector = readFloatVector(source, vecSize, writer)
        writer.add(word, vector)
      }

      logger.info(s"Loaded $numWords words, vector size $vecSize")
    } finally {
      writer.close()
    }
  }

  def index(
             source: String,
             writer: WordEmbeddingsWriter): Unit = {

    val ds = new DataInputStream(new BufferedInputStream(new FileInputStream(source), 1 << 15))

    try {
      index(ds, writer)
    } finally {
      ds.close()
    }
  }
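  // A minimal reconstruction of the private readString helper used above, assuming
  // the usual word2vec binary layout in which tokens are delimited by a space or
  // newline; the exact original implementation may differ.
  private def readString(ds: DataInputStream): String = {
    val buffer = new ByteArrayOutputStream()
    var byteValue = ds.readByte()
    // skip any delimiters left over from the previous token
    while (byteValue == ' '.toByte || byteValue == '\n'.toByte) byteValue = ds.readByte()
    while (byteValue != ' '.toByte && byteValue != '\n'.toByte) {
      buffer.write(byteValue.toInt)
      byteValue = ds.readByte()
    }
    new String(buffer.toByteArray)
  }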

  
  private def readFloatVector(ds: DataInputStream, vectorSize: Int, indexer: WordEmbeddingsWriter): Array[Float] = {
    // Read Bytes
    val vectorBuffer = Array.fill[Byte](4 * vectorSize)(0)
    ds.readFully(vectorBuffer) // readFully avoids short reads from the buffered stream

    // Convert Bytes to Floats
    indexer.fromBytes(vectorBuffer)
  }
} 
Example 74
Source File: TextureMappedPropertyIO.scala    From parametric-face-image-generator   with Apache License 2.0 5 votes vote down vote up
package faces.utils

import java.io.{File, FileInputStream, FileOutputStream}

import scalismo.faces.color.{ColorSpaceOperations, RGBA}
import scalismo.faces.image.BufferedImageConverter
import scalismo.faces.io.{MeshIO, PixelImageIO}
import scalismo.faces.mesh.{ColorNormalMesh3D, TextureMappedProperty}
import scalismo.geometry.{Point, _2D}
import scalismo.mesh.{MeshSurfaceProperty, TriangleCell, TriangleList}
import spray.json.JsObject

import scala.reflect.ClassTag
import scala.util.Try
import spray.json._

object TextureMappedPropertyIO extends App {

  import scalismo.faces.io.renderparameters.RenderParameterJSONFormatV2._

  import scalismo.faces.io.RenderParameterIO._
  def read[A: ClassTag](directory: String, stem: String)(implicit converter: BufferedImageConverter[A], ops: ColorSpaceOperations[A]): TextureMappedProperty[A] = read[A](new File(directory+"/"+stem+".json"),new File(directory+"/"+stem+".png"))

  def read[A: ClassTag](mappingFile: File, imageFile: File)(implicit converter: BufferedImageConverter[A],  ops: ColorSpaceOperations[A]) : TextureMappedProperty[A] = {

    import scalismo.faces.io.RenderParameterIO.readASTFromStream

    val fields = readASTFromStream(new FileInputStream(mappingFile)).asJsObject.fields
    val triangles = fields("triangles").convertTo[IndexedSeq[TriangleCell]]
    val triangulation = TriangleList(triangles)

    val textureMapping = fields("textureMapping").convertTo[MeshSurfaceProperty[Point[_2D]]]

    val texture = PixelImageIO.read[A](imageFile).get

    TextureMappedProperty[A](triangulation, textureMapping, texture)
  }

  def write[A:ClassTag](textureMappedProperty: TextureMappedProperty[A], directory: String, stem: String)(implicit converter: BufferedImageConverter[A]): Try[Unit] = Try {
    val writeImage = PixelImageIO.write(
      textureMappedProperty.texture,
      new File(directory+"/"+stem+".png")
    ).get

    val mapping = JsObject(
      "triangles" -> textureMappedProperty.triangulation.triangles.toJson,
      "textureMapping" -> textureMappedProperty.textureMapping.toJson,
      "@type" -> "TextureMappedProperty".toJson
    )

    val os = new FileOutputStream(new File(directory+"/"+stem+".json"))
    writeASTToStream(mapping, os)
  }

} 
Example 75
Source File: Utils.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.localfaas

import java.io.FileInputStream

import better.files.File
import org.apache.commons.compress.archivers.{ArchiveEntry, ArchiveStreamFactory}
import org.apache.commons.compress.utils.IOUtils

import scala.util.{Failure, Try}

object Utils {
  def unzip(source: File, target: File): Unit = {
    val inputStream   = new FileInputStream(source.path.toFile)
    val archiveStream = new ArchiveStreamFactory().createArchiveInputStream(ArchiveStreamFactory.ZIP, inputStream)

    def stream: Stream[ArchiveEntry] = archiveStream.getNextEntry match {
      case null  => Stream.empty
      case entry => entry #:: stream
    }

    def closeStreams = {
      archiveStream.close()
      inputStream.close()
    }

    Try {
      for (entry <- stream if !entry.isDirectory) {
        val outFile = (target / entry.getName).createIfNotExists(asDirectory = false, createParents = true).clear()
        val os      = outFile.newOutputStream

        Try { IOUtils.copy(archiveStream, os) } match {
          case Failure(e) => os.close(); throw e
          case _          => os.close()
        }
      }
    } match {
      case Failure(e) => closeStreams; throw e
      case _          => closeStreams
    }
  }
} 
Example 76
Source File: FileUtilities.scala    From mmlspark   with MIT License 5 votes vote down vote up
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.core.env

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

import scala.io.{BufferedSource, Source}

object FileUtilities {

  def join(folders: String*): File = {
    folders.tail.foldLeft(new File(folders.head)) { case (f, s) => new File(f, s) }
  }

  def join(base: File, folders: String*): File = {
    folders.foldLeft(base) { case (f, s) => new File(f, s) }
  }

  // Same for StandardOpenOption
  type StandardOpenOption = java.nio.file.StandardOpenOption
  object StandardOpenOption {
    import java.nio.file.{StandardOpenOption => S}
    val APPEND = S.APPEND
    val CREATE = S.CREATE
  }

  def allFiles(dir: File, pred: (File => Boolean) = null): Array[File] = {
    def loop(dir: File): Array[File] = {
      val (dirs, files) = dir.listFiles.sorted.partition(_.isDirectory)
      (if (pred == null) files else files.filter(pred)) ++ dirs.flatMap(loop)
    }
    loop(dir)
  }

  // readFile takes a file name or a File, and function to extract a value from
  // BufferedSource which defaults to _.mkString; performs the read, closes the
  // source, and returns the result
  def readFile[T](file: File, read: BufferedSource => T): T = {
    val i = Source.fromFile(file)
    try read(i) finally i.close
  }
  def readFile(file: File): String = readFile(file, _.mkString)

  def writeFile(file: File, stuff: Any, flags: StandardOpenOption*): Unit = {
    Files.write(file.toPath, stuff.toString.getBytes(), flags: _*)
    ()
  }

  def copyFile(from: File, toDir: File, overwrite: Boolean = false): Unit = {
    Files.copy(from.toPath, (new File(toDir, from.getName)).toPath,
               (if (overwrite) Seq(StandardCopyOption.REPLACE_EXISTING)
                else Seq()): _*)
    ()
  }

  // Perhaps this should move into a more specific place, not a generic file utils thing
  def zipFolder(dir: File, out: File): Unit = {
    import java.io.{BufferedInputStream, FileInputStream, FileOutputStream}
    import java.util.zip.{ZipEntry, ZipOutputStream}
    val bufferSize = 2 * 1024
    val data = new Array[Byte](bufferSize)
    val zip = new ZipOutputStream(new FileOutputStream(out))
    val prefixLen = dir.getParentFile.toString.length + 1
    allFiles(dir).foreach { file =>
      zip.putNextEntry(new ZipEntry(file.toString.substring(prefixLen).replace(java.io.File.separator, "/")))
      val in = new BufferedInputStream(new FileInputStream(file), bufferSize)
      var b = 0
      while (b >= 0) { zip.write(data, 0, b); b = in.read(data, 0, bufferSize) }
      in.close()
      zip.closeEntry()
    }
    zip.close()
  }

} 
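A small usage sketch for the helpers above; the directory and file names are illustrative.

import com.microsoft.ml.spark.core.env.FileUtilities._

object FileUtilitiesDemo extends App {
  val dir = join("target", "demo")
  dir.mkdirs()
  val file = join(dir, "notes.txt")

  writeFile(file, "hello", StandardOpenOption.CREATE)
  println(readFile(file))                              // prints "hello"
  println(allFiles(dir).map(_.getName).mkString(", ")) // lists notes.txt
}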
Example 77
Source File: AndroidXMLParser.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.amandroid.core.parser

import java.io.File
import java.util.zip.ZipEntry
import java.util.zip.ZipInputStream
import java.io.FileInputStream


object AndroidXMLParser {
	def handleAndroidXMLFiles(apk: File, fileNameFilter: Set[String],
			handler: AndroidXMLHandler): Unit = {

		try {
			var archive: ZipInputStream = null
			try {
				archive = new ZipInputStream(new FileInputStream(apk))
				var entry: ZipEntry = null
				entry = archive.getNextEntry
				while (entry != null) {
					val entryName = entry.getName
					handler.handleXMLFile(entryName, fileNameFilter, archive)
					entry = archive.getNextEntry
				}
			}
			finally {
				if (archive != null)
					archive.close()
			}
		}
		catch {
		  case e: Exception =>
				e.printStackTrace()
				throw e
		}
	}
} 
Example 78
Source File: AmandroidSettings.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.amandroid.core

import java.io.{File, FileInputStream, InputStream}

import org.ini4j.Wini
import org.argus.jawa.core.util.FileUtil


class AmandroidSettings(amandroid_home: String, iniPathOpt: Option[String]) {
  private val amandroid_home_uri = FileUtil.toUri(amandroid_home)
  private def defaultLibFiles =
    amandroid_home + "/androidSdk/android-25/android.jar" + java.io.File.pathSeparator +
    amandroid_home + "/androidSdk/support/v4/android-support-v4.jar" + java.io.File.pathSeparator +
    amandroid_home + "/androidSdk/support/v13/android-support-v13.jar" + java.io.File.pathSeparator +
    amandroid_home + "/androidSdk/support/v7/android-support-v7-appcompat.jar"
  private def defaultThirdPartyLibFile = amandroid_home + "/liblist.txt"
  private val iniUri = {
    iniPathOpt match {
      case Some(path) => FileUtil.toUri(path)
      case None => FileUtil.appendFileName(amandroid_home_uri, "config.ini")
    }
  }
  private val ini = new Wini(FileUtil.toFile(iniUri))
  def timeout: Int = Option(ini.get("analysis", "timeout", classOf[Int])).getOrElse(5)
  def dependence_dir: Option[String] = Option(ini.get("general", "dependence_dir", classOf[String]))
  def debug: Boolean = ini.get("general", "debug", classOf[Boolean])
  def lib_files: String = Option(ini.get("general", "lib_files", classOf[String])).getOrElse(defaultLibFiles)
  def third_party_lib_file: String = Option(ini.get("general", "third_party_lib_file", classOf[String])).getOrElse(defaultThirdPartyLibFile)
  def actor_conf_file: InputStream = Option(ini.get("concurrent", "actor_conf_file", classOf[String])) match {
    case Some(path) => new FileInputStream(path)
    case None => getClass.getResourceAsStream("/application.conf")
  }
  def static_init: Boolean = ini.get("analysis", "static_init", classOf[Boolean])
  def parallel: Boolean = ini.get("analysis", "parallel", classOf[Boolean])
  def k_context: Int = ini.get("analysis", "k_context", classOf[Int])
  def sas_file: String = Option(ini.get("analysis", "sas_file", classOf[String])).getOrElse(amandroid_home + File.separator + "taintAnalysis" + File.separator + "sourceAndSinks" + File.separator + "TaintSourcesAndSinks.txt")
  def native_sas_file: String = Option(ini.get("analysis", "sas_file", classOf[String])).getOrElse(amandroid_home + File.separator + "taintAnalysis" + File.separator + "sourceAndSinks" + File.separator + "NativeSourcesAndSinks.txt")
  def injection_sas_file: String = Option(ini.get("analysis", "injection_sas_file", classOf[String])).getOrElse(amandroid_home + File.separator + "taintAnalysis" + File.separator + "sourceAndSinks" + File.separator + "IntentInjectionSourcesAndSinks.txt")
} 
Example 79
Source File: GuessAppPackagesTest.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.amandroid.core.util

import java.io.{File, FileInputStream}

import org.argus.amandroid.core.parser.ManifestParser
import org.scalatest.{FlatSpec, Matchers}

import scala.language.implicitConversions

class GuessAppPackagesTest extends FlatSpec with Matchers {
  implicit def manifest(path: String): TestManifest = new TestManifest(getClass.getResource(path).getPath)

  "/manifests/AndroidManifest1.xml" gen_pkg_names (
    "com.gamesdj.desafiando",
    "com.google.android.gms",
    "com.unity3d"
  )

  "/manifests/AndroidManifest2.xml" gen_pkg_names "org.arguslab.icc_implicit_action"

  class TestManifest(path: String) {
    def gen_pkg_names(expected: String*): Unit = {
      path should "generate pkg name as expected" in {
        val manifestIS = new FileInputStream(new File(path))
        val mfp = new ManifestParser
        mfp.loadClassesFromTextManifest(manifestIS)
        manifestIS.close()
        val guessed = GuessAppPackages.guess(mfp)
        assert(guessed.size == expected.size && (guessed -- expected).isEmpty)
      }
    }
  }
} 
Example 80
Source File: DeployConfig.scala    From hail   with MIT License 5 votes vote down vote up
package is.hail.services

import java.io.{File, FileInputStream}

import is.hail.utils._
import org.json4s.{DefaultFormats, Formats, JValue}
import org.json4s.jackson.JsonMethods

object DeployConfig {
  lazy val get: DeployConfig = fromConfigFile()

  def fromConfigFile(file0: String = null): DeployConfig = {
    var file = file0

    if (file == null)
      file = System.getenv("HAIL_DEPLOY_CONFIG_FILE")

    if (file == null) {
      val fromHome = s"${ System.getenv("HOME") }/.hail/deploy-config.json"
      if (new File(fromHome).exists())
        file = fromHome
    }

    if (file == null) {
      val f = "/deploy-config/deploy-config.json"
      if (new File(f).exists())
        file = f
    }

    if (file != null) {
      using(new FileInputStream(file)) { in =>
        fromConfig(JsonMethods.parse(in))
      }
    } else
      new DeployConfig(
        "external",
        "default",
        Map())
  }

  def fromConfig(config: JValue): DeployConfig = {
    implicit val formats: Formats = DefaultFormats
    new DeployConfig(
      (config \ "location").extract[String],
      (config \ "default_namespace").extract[String],
      (config \ "service_namespace").extract[Map[String, String]])
  }
}

class DeployConfig(
  val location: String,
  val defaultNamespace: String,
  val serviceNamespace: Map[String, String]) {

  def scheme(baseScheme: String = "http"): String = {
    if (location == "external" || location == "k8s")
      baseScheme + "s"
    else
      baseScheme
  }

  def getServiceNamespace(service: String): String = {
    serviceNamespace.getOrElse(service, defaultNamespace)
  }

  def domain(service: String): String = {
    val ns = getServiceNamespace(service)
    location match {
      case "k8s" =>
        s"$service.$ns"
      case "gce" =>
        if (ns == "default")
          s"$service.hail"
        else
          "internal.hail"
      case "external" =>
        if (ns == "default")
          s"$service.hail.is"
        else
          "internal.hail.is"
    }
  }

  def basePath(service: String): String = {
    val ns = getServiceNamespace(service)
    if (ns == "default")
      ""
    else
      s"/$ns/$service"
  }

  def baseUrl(service: String, baseScheme: String = "http"): String = {
    s"${ scheme(baseScheme) }://${ domain(service) }${ basePath(service) }"
  }
} 
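A sketch of how the resolved URLs differ by location, based on the class above; the namespaces and service names are illustrative.

import is.hail.services.DeployConfig

object DeployConfigDemo extends App {
  val external = new DeployConfig("external", "default", Map())
  val k8s      = new DeployConfig("k8s", "default", Map("batch" -> "test"))

  println(external.baseUrl("batch"))   // https://batch.hail.is
  println(k8s.baseUrl("batch"))        // https://batch.test/test/batch
  println(k8s.baseUrl("query", "ws"))  // wss://query.default
}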
Example 81
Source File: package.scala    From hail   with MIT License 5 votes vote down vote up
package is.hail.services

import is.hail.utils._
import org.json4s.{DefaultFormats, Formats}
import java.io.{File, FileInputStream}
import java.security.KeyStore

import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}
import org.apache.log4j.{LogManager, Logger}
import org.json4s.jackson.JsonMethods

class NoSSLConfigFound(
  message: String,
  cause: Throwable
) extends Exception(message, cause) {
  def this() = this(null, null)

  def this(message: String) = this(message, null)
}

case class SSLConfig(
  outgoing_trust: String,
  outgoing_trust_store: String,
  incoming_trust: String,
  incoming_trust_store: String,
  key: String,
  cert: String,
  key_store: String)

package object tls {
  lazy val log: Logger = LogManager.getLogger("is.hail.tls")

  private[this] lazy val _getSSLConfig: SSLConfig = {
    var configFile = System.getenv("HAIL_SSL_CONFIG_FILE")
    if (configFile == null)
      configFile = "/ssl-config/ssl-config.json"
    if (!new File(configFile).isFile)
      throw new NoSSLConfigFound(s"no ssl config file found at $configFile")

    log.info(s"ssl config file found at $configFile")

    using(new FileInputStream(configFile)) { is =>
      implicit val formats: Formats = DefaultFormats
      JsonMethods.parse(is).extract[SSLConfig]
    }
  }

  lazy val getSSLContext: SSLContext = {
    val sslConfig = _getSSLConfig

    val pw = "dummypw".toCharArray

    val ks = KeyStore.getInstance("PKCS12")
    using(new FileInputStream(sslConfig.key_store)) { is =>
      ks.load(is, pw)
    }
    val kmf = KeyManagerFactory.getInstance("SunX509")
    kmf.init(ks, pw)

    val ts = KeyStore.getInstance("JKS")
    using(new FileInputStream(sslConfig.outgoing_trust_store)) { is =>
      ts.load(is, pw)
    }
    val tmf = TrustManagerFactory.getInstance("SunX509")
    tmf.init(ts)

    val ctx = SSLContext.getInstance("TLS")
    ctx.init(kmf.getKeyManagers, tmf.getTrustManagers, null)

    ctx
  }
} 
Example 82
Source File: Tokens.scala    From hail   with MIT License 5 votes vote down vote up
package is.hail.services

import is.hail.utils._
import java.io.{File, FileInputStream}

import org.apache.http.client.methods.HttpUriRequest
import org.apache.log4j.{LogManager, Logger}
import org.json4s.{DefaultFormats, Formats}
import org.json4s.jackson.JsonMethods

object Tokens {
  lazy val log: Logger = LogManager.getLogger("Tokens")

  def get: Tokens = {
    val file = getTokensFile()
    if (new File(file).isFile) {
      using(new FileInputStream(file)) { is =>
        implicit val formats: Formats = DefaultFormats
        new Tokens(JsonMethods.parse(is).extract[Map[String, String]])
      }
    } else {
      log.info(s"tokens file not found: $file")
      new Tokens(Map())
    }
  }

  def getTokensFile(): String = {
    if (DeployConfig.get.location == "external")
      s"${ System.getenv("HOME") }/.hail/tokens.json"
    else
      "/user-tokens/tokens.json"
  }
}

class Tokens(
  tokens: Map[String, String]
) {
  def namespaceToken(ns: String): String = tokens(ns)

  def addNamespaceAuthHeaders(ns: String, req: HttpUriRequest): Unit = {
    val token = namespaceToken(ns)
    req.addHeader("Authorization", s"Bearer $token")
    val location = DeployConfig.get.location
    if (location == "external" && ns != "default")
      req.addHeader("X-Hail-Internal-Authorization", s"Bearer ${ namespaceToken("default") }")
  }

  def addServiceAuthHeaders(service: String, req: HttpUriRequest): Unit = {
    addNamespaceAuthHeaders(DeployConfig.get.getServiceNamespace(service), req)
  }
} 
Example 83
Source File: IngesterMain.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.tools.data.ingester

import java.io.FileInputStream
import java.util.zip.GZIPInputStream

import akka.stream.scaladsl.Sink
import cmwell.tools.data.utils.akka.stats.IngesterStats
//import cmwell.tools.data.sparql.SparqlProcessorMain.Opts.opt
import cmwell.tools.data.utils.ArgsManipulations._
import cmwell.tools.data.utils.akka.Implicits._
import cmwell.tools.data.utils.akka._
import cmwell.tools.data.utils.ops._
import com.typesafe.scalalogging.LazyLogging
import org.rogach.scallop.ScallopConf

import scala.concurrent.ExecutionContext.Implicits.global

object IngesterMain extends App with LazyLogging {
  object Opts extends ScallopConf(args) {
    version(s"cm-well ingester ${getVersionFromManifest()} (c) 2015")

    val host = opt[String]("host", descr = "cm-well host name", required = true)
    val format = opt[String]("format", descr = "input format (e.g. ntriples, nquads, jsonld)", required = true)
    val file = opt[String]("file", descr = "input file path", default = None)
    val gzip = opt[Boolean]("gzip", descr = "is input file gzipped", default = Some(false))
    val token = opt[String]("token", descr = "cm-well write permission token", default = None)
    val replaceMode =
      opt[Boolean]("with-replace-mode", descr = "replace-mode parameter in cm-well", default = Some(false))
    val force = opt[Boolean]("force", descr = "force parameter in cm-well", default = Some(false))
    val priority = opt[Boolean]("priority", default = Some(false), descr = "ingest data in priority mode")
    val numConnections = opt[Int]("num-connections", descr = "number of http connections to open")

    dependsOnAll(gzip, List(file))
    verify()
  }

  val start = System.currentTimeMillis()

  var totalIngestedBytes = 0L
  var ingestedBytesInWindow = 0
  var ingestedInfotonsInWindow = 0
  var totalIngestedInfotons = 0L
  var totalFailedInfotons = 0L
  var lastTime = start
  var nextPrint = 0L
  var lastMessageSize = 0
  val windowSizeMillis = 1000

  val formatter = java.text.NumberFormat.getNumberInstance

  // resize akka http connection pool
  Opts.numConnections.toOption.map { numConnections =>
    System.setProperty("akka.http.host-connection-pool.max-connections", numConnections.toString)
  }

  val inputStream = if (Opts.file.isSupplied) {
    val inputFile = new FileInputStream(Opts.file())
    if (Opts.gzip()) {
      new GZIPInputStream(inputFile)
    } else {
      inputFile
    }
  } else {
    System.in
  }

  val result = Ingester
    .fromInputStream(
      baseUrl = formatHost(Opts.host()),
      format = Opts.format(),
      writeToken = Opts.token.toOption,
      replaceMode = Opts.replaceMode(),
      force = Opts.force(),
      isPriority = Opts.priority(),
      in = inputStream
    )
    .via(IngesterStats(isStderr = true))
    .runWith(Sink.ignore)

  // actor system is still alive, will be destroyed when finished
  result.onComplete { x =>
    System.err.println("\n")
    System.err.println(s"finished: $x")
    cleanup()
  }
} 
Example 84
Source File: BlockLang.scala    From jgo   with GNU General Public License v3.0 5 votes vote down vote up
package jgo.tools.compiler
package parser

import scala.util.parsing.input.Reader

import lexer._
import scope._
import interm._
import interm.types._

import stmts._
import funcs._


class BlockLang(in: Reader[Token], res: List[Type] = Nil, resNamed: Boolean = false) extends FuncContext with Statements {
  //def, not val.  See comment in StackScoped
  def initialEnclosing = UniverseScope
  
  def targetFuncType = FuncType(Nil, res)
  def hasNamedResults = resNamed
  
  lazy val result = phrase(block)(in)
}

object BlockLang {
  import java.io.{File, InputStream, FileInputStream, InputStreamReader}
  import scala.collection.immutable.PagedSeq
  
  def apply(in: Reader[Char]):  BlockLang = new BlockLang(Scanner(in))
  def apply(inStr: String):     BlockLang = new BlockLang(Scanner(inStr))
  def apply(in: InputStream):   BlockLang = new BlockLang(Scanner(in))
  def apply(file: File):        BlockLang = new BlockLang(Scanner(file))
  
  def from(fileName: String):   BlockLang = new BlockLang(Scanner.from(fileName))
} 
Example 85
Source File: Scanner.scala    From jgo   with GNU General Public License v3.0 5 votes vote down vote up
package jgo.tools.compiler
package lexer

import scala.util.parsing._
import input._
import combinator._

//portions of this class taken from scala.util.parsing.combinator.lexical.Scanners#Scanner
final class Scanner private(prev: Option[Token], in: Reader[Char]) extends Reader[Token] {
  private def this(in: Reader[Char]) = this(None, in)
  
  private val (tok, remainingIn) = Lexical.token(prev, in)
  
  def      first = {  tok }
  lazy val rest  = new Scanner(Some(tok), remainingIn)
  lazy val pos   = Lexical.stripWhitespace(in).pos
  def      atEnd = tok == EOF
  
  override def source = in.source
  override def offset = in.offset
  
  def foreach[U](f: Token => U) {
    var cur = this
    while (!cur.atEnd) {
      f(cur.first)
      cur = cur.rest
    }
  }
}

object Scanner {
  import java.io.{File, InputStream, FileInputStream, InputStreamReader}
  import scala.collection.immutable.PagedSeq
  
  def apply(in: Reader[Char]): Scanner = new Scanner(None, in)
  def apply(inStr: String):    Scanner = new Scanner(new CharArrayReader(inStr.toCharArray()))
  def apply(in: File):         Scanner = apply(new FileInputStream(in))
  def apply(in: InputStream):  Scanner =
    new Scanner(None, new PagedSeqReader(PagedSeq.fromReader(new InputStreamReader(in , "UTF-8"))))
  
  def from(fileName: String): Scanner = apply(new FileInputStream(fileName))
} 
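A usage sketch that tokenises a short snippet held in a string; the snippet and object name are illustrative, and the exact token rendering depends on the project's Token implementations.

import jgo.tools.compiler.lexer.Scanner

object TokenizeSnippet extends App {
  val tokens = Scanner("x := 1 + 2")
  tokens.foreach(tok => print(tok + " "))
  println()
}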
Example 86
Source File: LexTestAll.scala    From jgo   with GNU General Public License v3.0 5 votes vote down vote up
import jgo.tools.compiler._
import parser.BlockLang
import parser.combinatorExten._
import lexer._

import interm.codeseq._

import java.io.{File, InputStream, FileInputStream, InputStreamReader}

object LexTestAll {
  def main(args: Array[String]) {
    if (args.isEmpty)
      testAll(new File(System.getProperty("user.home") + "/Desktop/gotest/"))
    else
      testAll(new File(args(0)))
  }
  
  def testAll(dir: File) {
    for (file <- dir.listFiles)
      if (file.isDirectory)
        testAll(file)
      else if (file.isFile && !file.isHidden)
        test(file)
  }
  
  def test(file: File) {
    println("testing: " + file.getCanonicalPath)
    println()
    
    var cur = Scanner(file)
    print("tokenization: ")
    while (!cur.atEnd) {
      print(cur.first + " ")
      cur = cur.rest
    }
    println()
    println()
  }
} 
Example 87
Source File: DeployDynamoDBLocal.scala    From sbt-dynamodb   with MIT License 5 votes vote down vote up
package com.localytics.sbt.dynamodb

import java.io.FileInputStream
import java.net.URL
import java.util.zip.GZIPInputStream
import java.util.zip.ZipFile

import sbt.File
import sbt.Keys._

import scala.concurrent.duration.Duration
import scala.sys.process._
import scala.util.Try

object DeployDynamoDBLocal {

  private[dynamodb] def validJar(file: File): Boolean = Try(new ZipFile(file)).isSuccess

  private[dynamodb] def validGzip(file: File): Boolean = Try(new GZIPInputStream(new FileInputStream(file)).read()).isSuccess

  def apply(ver: String, url: Option[String], targetDir: File, downloadIfOlderThan: Duration, streamz: TaskStreams): File = {
    val targz = new File(targetDir, s"dynamodb_local_$ver.tar.gz")
    val jar = new File(targetDir, "DynamoDBLocal.jar")

    def isStale(file: File) = ver == "latest" && System.currentTimeMillis - file.lastModified() > downloadIfOlderThan.toMillis

    if (!targetDir.exists()) {
      streamz.log.info(s"Creating DynamoDB Local directory $targetDir")
      targetDir.mkdirs()
    }
    if (!targz.exists() || isStale(targz) || !validGzip(targz)) {
      val remoteFile = url.getOrElse(s"https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_$ver.tar.gz")
      streamz.log.info(s"Downloading targz from [$remoteFile] to [${targz.getAbsolutePath}]")
      (new URL(remoteFile) #> targz).!!
    }
    if (!validGzip(targz)) sys.error(s"Invalid gzip file at [${targz.getAbsolutePath}]")
    if (!jar.exists() || !validJar(jar)) {
      streamz.log.info(s"Extracting jar from [${targz.getAbsolutePath}] to [${jar.getAbsolutePath}]")
      Process(Seq("tar", "xzf", targz.getName), targetDir).!!
    }
    if (!validJar(jar)) sys.error(s"Invalid jar file at [${jar.getAbsolutePath}]")
    jar
  }

} 
Example 88
Source File: MetricsConfig.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.metrics

import java.io.{FileInputStream, InputStream}
import java.util.Properties

import scala.collection.mutable
import scala.util.matching.Regex

import org.apache.spark.Logging
import org.apache.spark.util.Utils

private[spark] class MetricsConfig(val configFile: Option[String]) extends Logging {

  private val DEFAULT_PREFIX = "*"
  private val INSTANCE_REGEX = "^(\\*|[a-zA-Z]+)\\.(.+)".r
  private val DEFAULT_METRICS_CONF_FILENAME = "metrics.properties"

  private[metrics] val properties = new Properties()
  private[metrics] var propertyCategories: mutable.HashMap[String, Properties] = null

  private def setDefaultProperties(prop: Properties) {
    prop.setProperty("*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet")
    prop.setProperty("*.sink.servlet.path", "/metrics/json")
    prop.setProperty("master.sink.servlet.path", "/metrics/master/json")
    prop.setProperty("applications.sink.servlet.path", "/metrics/applications/json")
  }

  def initialize() {
    // Add default properties in case there's no properties file
    setDefaultProperties(properties)

    // If spark.metrics.conf is not set, try to get file in class path
    val isOpt: Option[InputStream] = configFile.map(new FileInputStream(_)).orElse {
      try {
        Option(Utils.getSparkClassLoader.getResourceAsStream(DEFAULT_METRICS_CONF_FILENAME))
      } catch {
        case e: Exception =>
          logError("Error loading default configuration file", e)
          None
      }
    }

    isOpt.foreach { is =>
      try {
        properties.load(is)
      } finally {
        is.close()
      }
    }

    propertyCategories = subProperties(properties, INSTANCE_REGEX)
    if (propertyCategories.contains(DEFAULT_PREFIX)) {
      import scala.collection.JavaConversions._

      val defaultProperty = propertyCategories(DEFAULT_PREFIX)
      for { (inst, prop) <- propertyCategories
            if (inst != DEFAULT_PREFIX)
            (k, v) <- defaultProperty
            if (prop.getProperty(k) == null) } {
        prop.setProperty(k, v)
      }
    }
  }

  def subProperties(prop: Properties, regex: Regex): mutable.HashMap[String, Properties] = {
    val subProperties = new mutable.HashMap[String, Properties]
    import scala.collection.JavaConversions._
    prop.foreach { kv =>
      if (regex.findPrefixOf(kv._1).isDefined) {
        val regex(prefix, suffix) = kv._1
        subProperties.getOrElseUpdate(prefix, new Properties).setProperty(suffix, kv._2)
      }
    }
    subProperties
  }

  def getInstance(inst: String): Properties = {
    propertyCategories.get(inst) match {
      case Some(s) => s
      case None => propertyCategories.getOrElse(DEFAULT_PREFIX, new Properties)
    }
  }
} 
Example 89
Source File: XMLFilesModel.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.xml

import java.io.{File, FileInputStream}

import org.apache.axiom.om.{OMElement, OMXMLBuilderFactory}
import org.apache.commons.io.FileUtils
import com.ebay.rtran.api.{IModel, IModelProvider}
import com.ebay.rtran.generic.GenericProjectCtx
import com.ebay.rtran.xml.util.XmlUtil

import scala.collection.JavaConversions._
import scala.language.postfixOps
import scala.util.{Success, Try}


case class XMLFilesModel(projectRoot: File,
                         xmlRoots: Map[File, OMElement],
                         modified: Map[File, Option[OMElement]] = Map.empty) extends IModel

class XMLFilesModelProvider extends IModelProvider[XMLFilesModel, GenericProjectCtx] {
  override def id(): String = getClass.getName

  override def save(model: XMLFilesModel): Unit = {
    model.modified foreach {
      case (file, root) => root.map(r => XmlUtil.writeOMElement2File(file, r))
    }
  }

  override def create(projectCtx: GenericProjectCtx): XMLFilesModel = XMLFilesModel(
    projectCtx.rootDir,
    FileUtils.listFiles(projectCtx.rootDir, Array("xml"), true) map {file =>
      file -> Try(OMXMLBuilderFactory.createOMBuilder(new FileInputStream(file)).getDocumentElement)
    } collect {
      case (f, Success(r)) => f -> r
    } toMap
  )
} 
Example 90
Source File: MimeTypeDetectorTest.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.salesforce.op.stages.impl.feature

import java.io.FileInputStream

import com.salesforce.op._
import com.salesforce.op.features.types._
import com.salesforce.op.stages.base.unary.UnaryTransformer
import com.salesforce.op.test.{OpTransformerSpec, TestFeatureBuilder, TestSparkContext}
import com.salesforce.op.testkit.RandomText
import com.salesforce.op.utils.spark.RichDataset._
import org.apache.commons.io.IOUtils
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner


@RunWith(classOf[JUnitRunner])
class MimeTypeDetectorTest extends OpTransformerSpec[Text, MimeTypeDetector] with Base64TestData {
  val inputData = randomData
  val transformer = new MimeTypeDetector().setInput(randomBase64)
  val expectedResult = expectedRandom

  it should "validate the type hint" in {
    assertThrows[IllegalArgumentException](new MimeTypeDetector().setTypeHint("blarg"))
  }
  it should "validate the max bytes to parse" in {
    assertThrows[IllegalArgumentException](new MimeTypeDetector().setMaxBytesToParse(-1L))
  }
  it should "detect octet stream data" in {
    val mime = randomBase64.detectMimeTypes()
    mime.originStage shouldBe a[UnaryTransformer[_, _]]
    val result = mime.originStage.asInstanceOf[UnaryTransformer[Base64, Text]].transform(randomData)

    result.collect(mime) should contain theSameElementsInOrderAs expectedRandom
  }
  it should "detect other mime types" in {
    val mime = realBase64.detectMimeTypes()
    val result = mime.originStage.asInstanceOf[UnaryTransformer[Base64, Text]].transform(realData)

    result.collect(mime) should contain theSameElementsInOrderAs expectedMime
  }
  it should "detect other mime types with a json type hint" in {
    val mime = realBase64.detectMimeTypes(typeHint = Some("application/json"))
    val result = mime.originStage.asInstanceOf[UnaryTransformer[Base64, Text]].transform(realData)

    result.collect(mime) should contain theSameElementsInOrderAs expectedMimeJson
  }
}


trait Base64TestData {
  self: TestSparkContext =>

  val seed = 42L

  lazy val (randomData, randomBase64) = {
    val rnd = RandomText.base64(0, 10000)
    rnd.reset(seed)
    TestFeatureBuilder(Base64.empty +: Base64("") +: rnd.take(10).toSeq)
  }
  lazy val (realData, realBase64) = TestFeatureBuilder(
    Seq(
      "811harmo24to36.mp3", "820orig36to48.wav", "face.png",
      "log4j.properties", "note.xml", "RunnerParams.json",
      "dummy.csv", "Canon_40D.jpg", "sample.pdf"
    ).map(loadResourceAsBase64)
  )

  val expectedRandom = Text.empty +: Seq.fill(11)(Text("application/octet-stream"))

  val expectedMime = Seq(
    "audio/mpeg", "audio/vnd.wave", "image/png",
    "text/plain", "application/xml", "text/plain",
    "text/plain", "image/jpeg", "application/pdf"
  ).map(_.toText)

  val expectedMimeJson = Seq(
    "audio/mpeg", "audio/vnd.wave", "image/png",
    "application/json", "application/xml", "application/json",
    "application/json", "image/jpeg", "application/pdf"
  ).map(_.toText)

  def loadResourceAsBase64(name: String): Base64 = Base64 {
    val bytes = IOUtils.toByteArray(new FileInputStream(resourceFile(name = name)))
    new String(java.util.Base64.getEncoder.encode(bytes))
  }

} 
Example 91
Source File: LogFile.scala    From kyuubi   with Apache License 2.0 5 votes vote down vote up
package yaooqinn.kyuubi.operation

import java.io.{BufferedReader, File, FileInputStream, FileNotFoundException, FileOutputStream, InputStreamReader, IOException, PrintStream}
import java.util.ArrayList

import scala.collection.JavaConverters._

import org.apache.commons.io.FileUtils
import org.apache.hadoop.io.IOUtils
import org.apache.kyuubi.Logging
import org.apache.spark.sql.Row

import yaooqinn.kyuubi.KyuubiSQLException

class LogFile private (
    file: File,
    private var reader: Option[BufferedReader],
    writer: PrintStream,
    @volatile private var isRemoved: Boolean = false) extends Logging {

  def this(file: File) = {
    this(file,
      LogFile.createReader(file, isRemoved = false),
      new PrintStream(new FileOutputStream(file)))
  }

  private def resetReader(): Unit = {
    reader.foreach(IOUtils.closeStream)
    reader = None
  }

  private def readResults(nLines: Long): Seq[Row] = {
    reader = reader.orElse(LogFile.createReader(file, isRemoved))

    val logs = new ArrayList[Row]()
    reader.foreach { r =>
      var i = 1
      try {
        var line: String = r.readLine()
        while ((i < nLines || nLines <= 0) && line != null) {
          logs.add(Row(line))
          line = r.readLine()
          i += 1
        }
      } catch {
        case e: FileNotFoundException =>
          val operationHandle = file.getName
          val path = file.getAbsolutePath
          val msg = if (isRemoved) {
            s"Operation[$operationHandle] has been closed and the log file $path has been removed"
          } else {
            s"Operation[$operationHandle] Log file $path is not found"
          }
          throw new KyuubiSQLException(msg, e)
      }
    }
    logs.asScala
  }

  
  def write(msg: String): Unit = {
    writer.print(msg)
  }


  def close(): Unit = synchronized {
    try {
      reader.foreach(_.close())
      writer.close()
      if (!isRemoved) {
        FileUtils.forceDelete(file)
        isRemoved = true
      }
    } catch {
      case e: IOException =>
        error(s"Failed to remove corresponding log file of operation: ${file.getName}", e)
    }
  }
}

object LogFile {

  def createReader(file: File, isRemoved: Boolean): Option[BufferedReader] = try {
    Option(new BufferedReader(new InputStreamReader(new FileInputStream(file))))
  } catch {
    case e: FileNotFoundException =>
      val operationHandle = file.getName
      val path = file.getAbsolutePath
      val msg = if (isRemoved) {
        s"Operation[$operationHandle] has been closed and the log file $path has been removed"
      } else {
        s"Operation[$operationHandle] Log file $path is not found"
      }
      throw new KyuubiSQLException(msg, e)
  }
} 
Example 92
Source File: CustomReceiver.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{InputStreamReader, BufferedReader, InputStream}
import java.net.Socket
import org.apache.spark.{SparkConf, Logging}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver
import java.io.File
import java.io.FileInputStream


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port) // connect to the host
     logInfo("Connected to " + host + ":" + port)
     // obtain the socket's input stream
     println("isConnected:"+socket.isConnected())
     val socketInput=socket.getInputStream()
     //
     //val inputFile=new File("../data/mllib/als/testCustomReceiver.data")
    // val  in = new FileInputStream(inputFile)
    //  val  in = new FileInputStream(socketInput)
     val reader = new BufferedReader(new InputStreamReader(socketInput, "UTF-8"))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput) // store the received line
       userInput = reader.readLine() // read the next line
       println("userInput:"+userInput)
     }
     reader.close() // close the reader
     socket.close() // close the connection
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 93
Source File: MetricsConfig.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.metrics

import java.io.{FileInputStream, InputStream}
import java.util.Properties

import scala.collection.mutable
import scala.util.matching.Regex

import org.apache.spark.util.Utils
import org.apache.spark.{Logging, SparkConf}

private[spark] class MetricsConfig(conf: SparkConf) extends Logging {

  private val DEFAULT_PREFIX = "*"
  private val INSTANCE_REGEX = "^(\\*|[a-zA-Z]+)\\.(.+)".r
  private val DEFAULT_METRICS_CONF_FILENAME = "metrics.properties"

  private[metrics] val properties = new Properties()
  private[metrics] var propertyCategories: mutable.HashMap[String, Properties] = null

  private def setDefaultProperties(prop: Properties) {
    prop.setProperty("*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet")
    prop.setProperty("*.sink.servlet.path", "/metrics/json")
    prop.setProperty("master.sink.servlet.path", "/metrics/master/json")
    prop.setProperty("applications.sink.servlet.path", "/metrics/applications/json")
  }

  def initialize() {
    // Add default properties in case there's no properties file
    setDefaultProperties(properties)

    loadPropertiesFromFile(conf.getOption("spark.metrics.conf"))

    // Also look for the properties in provided Spark configuration
    val prefix = "spark.metrics.conf."
    conf.getAll.foreach {
      case (k, v) if k.startsWith(prefix) =>
        properties.setProperty(k.substring(prefix.length()), v)
      case _ =>
    }

    propertyCategories = subProperties(properties, INSTANCE_REGEX)
    if (propertyCategories.contains(DEFAULT_PREFIX)) {
      import scala.collection.JavaConversions._

      val defaultProperty = propertyCategories(DEFAULT_PREFIX)
      for { (inst, prop) <- propertyCategories
            if (inst != DEFAULT_PREFIX)
            (k, v) <- defaultProperty
            if (prop.getProperty(k) == null) } {
        prop.setProperty(k, v)
      }
    }
  }
  // Use a regex to match property keys by instance prefix and group them into per-instance Properties in a HashMap
  def subProperties(prop: Properties, regex: Regex): mutable.HashMap[String, Properties] = {
    val subProperties = new mutable.HashMap[String, Properties]
    import scala.collection.JavaConversions._
    prop.foreach { kv =>
      if (regex.findPrefixOf(kv._1).isDefined) {
        val regex(prefix, suffix) = kv._1
        subProperties.getOrElseUpdate(prefix, new Properties).setProperty(suffix, kv._2)
      }
    }
    subProperties
  }

  def getInstance(inst: String): Properties = {
    propertyCategories.get(inst) match {
      case Some(s) => s
      case None => propertyCategories.getOrElse(DEFAULT_PREFIX, new Properties)
    }
  }

  
  private[this] def loadPropertiesFromFile(path: Option[String]): Unit = {
    var is: InputStream = null
    try {
      is = path match {
        case Some(f) => new FileInputStream(f)
        case None => Utils.getSparkClassLoader.getResourceAsStream(DEFAULT_METRICS_CONF_FILENAME)
      }

      if (is != null) {
        properties.load(is)
      }
    } catch {
      case e: Exception =>
        val file = path.getOrElse(DEFAULT_METRICS_CONF_FILENAME)
        logError(s"Error loading configuration file $file", e)
    } finally {
      if (is != null) {
        is.close()
      }
    }
  }

} 
Example 94
Source File: Package.scala    From seed   with Apache License 2.0 5 votes vote down vote up
package seed.generation

import java.io.{File, FileInputStream, OutputStream}
import java.util.jar.{Attributes, JarEntry, JarOutputStream, Manifest}

import org.apache.commons.io.IOUtils
import java.nio.file.Path

import seed.Log
import seed.cli.util.Ansi

import scala.collection.mutable

// Adapted from https://stackoverflow.com/a/1281295
object Package {
  def create(
    source: List[(Path, String)],
    target: OutputStream,
    mainClass: Option[String],
    classPath: List[String],
    log: Log
  ): Unit = {
    val manifest       = new Manifest()
    val mainAttributes = manifest.getMainAttributes
    mainAttributes.put(Attributes.Name.MANIFEST_VERSION, "1.0")
    // TODO Set additional package fields: https://docs.oracle.com/javase/tutorial/deployment/jar/packageman.html
    mainClass.foreach(
      cls => mainAttributes.put(Attributes.Name.MAIN_CLASS, cls)
    )
    if (classPath.nonEmpty)
      mainAttributes.put(Attributes.Name.CLASS_PATH, classPath.mkString(" "))

    val targetFile = new JarOutputStream(target, manifest)
    val entryCache = mutable.Set[String]()
    source.foreach {
      case (path, jarPath) =>
        log.debug(s"Packaging ${Ansi.italic(path.toString)}...")
        add(path.toFile, jarPath, targetFile, entryCache, log)
    }
    targetFile.close()
  }

  def add(
    source: File,
    jarPath: String,
    target: JarOutputStream,
    entryCache: mutable.Set[String],
    log: Log
  ): Unit = {
    val path =
      if (source.isFile) jarPath
      else {
        require(!jarPath.endsWith("/"))
        jarPath + "/"
      }

    val addedEntry =
      if (entryCache.contains(path)) {
        if (source.isFile)
          log.warn(
            s"Skipping file ${Ansi.italic(source.toString)} as another module already added it"
          )

        false
      } else {
        val entry = new JarEntry(path)
        entry.setTime(source.lastModified)
        target.putNextEntry(entry)
        entryCache += path
        if (source.isFile) IOUtils.copy(new FileInputStream(source), target)

        true
      }

    if (!source.isFile)
      for (nestedFile <- source.listFiles)
        add(nestedFile, path + nestedFile.getName, target, entryCache, log)

    if (addedEntry) target.closeEntry()
  }
} 
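A minimal usage sketch for the packager above; the output path, class file, and main class are hypothetical, and a seed.Log instance is assumed to be supplied by the caller rather than constructed here:

import java.io.FileOutputStream
import java.nio.file.Paths

import seed.Log

def buildJar(log: Log): Unit = {
  val out = new FileOutputStream("target/app.jar")
  try
    Package.create(
      // each entry maps a file on disk to its path inside the JAR
      source    = List(Paths.get("target/classes/com/example/Main.class") -> "com/example/Main.class"),
      target    = out,
      mainClass = Some("com.example.Main"),
      classPath = List("lib/dep.jar"),
      log       = log
    )
  finally out.close()
}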
Example 95
Source File: Assemblies.scala    From akka-grpc   with Apache License 2.0 5 votes vote down vote up
import java.io.{ File, FileInputStream, FileOutputStream }
import java.nio.file.Files

object Assemblies {

  
  def mkBatAssembly(assembly: File): File = {
    val file = Files.createTempFile("akka-grpc-", ".tmp").toFile

    file.deleteOnExit()
    copySkippingUntil('@'.toByte, assembly, file)
    file
  }

  private def copySkippingUntil(b: Byte, src: File, dst: File): Unit = {
    val in = new FileInputStream(src)
    try {
      val out = new FileOutputStream(dst, false)
      val foundSkipByte = Iterator.continually(in.read()).takeWhile(_ >= 0).dropWhile(_ != b.toInt).nonEmpty

      try {
        if (foundSkipByte)
          out.write(b.toInt)

        val buffer = new Array[Byte](1024)
        var continue = foundSkipByte
        while (continue) {
          val r = in.read(buffer)
          if (r < 0) continue = false
          else out.write(buffer, 0, r)
        }
      } finally {
        out.close()
      }
    } finally {
      in.close()
    }
  }
} 
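A quick sketch of how the helper above might be invoked; the launcher path is hypothetical. The copy drops everything before the first '@' byte and writes the remainder to a temporary file:

import java.io.File

val launcher: File = new File("target/universal/stage/bin/launcher")
val batAssembly: File = Assemblies.mkBatAssembly(launcher)
println(s"Batch-compatible copy written to ${batAssembly.getAbsolutePath}")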
Example 96
Source File: AkkaGrpcServerScala.scala    From akka-grpc   with Apache License 2.0 5 votes vote down vote up
package akka.grpc.interop

import java.io.FileInputStream
import java.nio.file.{ Files, Paths }
import java.security.cert.CertificateFactory
import java.security.spec.PKCS8EncodedKeySpec
import java.security.{ KeyFactory, KeyStore, SecureRandom }

import scala.concurrent.duration._

import akka.actor.ActorSystem
import akka.util.ByteString
import akka.http.scaladsl.Http.ServerBinding
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.http.scaladsl.{ Http2, HttpsConnectionContext }
import akka.stream.SystemMaterializer
import io.grpc.internal.testing.TestUtils
import javax.net.ssl.{ KeyManagerFactory, SSLContext }

import scala.concurrent.{ Await, Future }


case class AkkaGrpcServerScala(serverHandlerFactory: ActorSystem => HttpRequest => Future[HttpResponse])
    extends GrpcServer[(ActorSystem, ServerBinding)] {
  override def start() = {
    implicit val sys = ActorSystem()
    implicit val mat = SystemMaterializer(sys).materializer

    val testService = serverHandlerFactory(sys)

    val bindingFuture = Http2().bindAndHandleAsync(
      testService,
      interface = "127.0.0.1",
      port = 0,
      parallelism = 256, // TODO remove once https://github.com/akka/akka-http/pull/2146 is merged
      connectionContext = serverHttpContext())

    val binding = Await.result(bindingFuture, 10.seconds)
    (sys, binding)
  }

  override def stop(binding: (ActorSystem, ServerBinding)) =
    binding match {
      case (sys, binding) =>
        sys.log.info("Exception thrown, unbinding")
        Await.result(binding.unbind(), 10.seconds)
        Await.result(sys.terminate(), 10.seconds)
    }

  private def serverHttpContext() = {
    val keyEncoded =
      new String(Files.readAllBytes(Paths.get(TestUtils.loadCert("server1.key").getAbsolutePath)), "UTF-8")
        .replace("-----BEGIN PRIVATE KEY-----\n", "")
        .replace("-----END PRIVATE KEY-----\n", "")
        .replace("\n", "")

    val decodedKey = ByteString(keyEncoded).decodeBase64.toArray

    val spec = new PKCS8EncodedKeySpec(decodedKey)

    val kf = KeyFactory.getInstance("RSA")
    val privateKey = kf.generatePrivate(spec)

    val fact = CertificateFactory.getInstance("X.509")
    val is = new FileInputStream(TestUtils.loadCert("server1.pem"))
    val cer = fact.generateCertificate(is)

    val ks = KeyStore.getInstance("PKCS12")
    ks.load(null)
    ks.setKeyEntry("private", privateKey, Array.empty, Array(cer))

    val keyManagerFactory = KeyManagerFactory.getInstance("SunX509")
    keyManagerFactory.init(ks, null)

    val context = SSLContext.getInstance("TLS")
    context.init(keyManagerFactory.getKeyManagers, null, new SecureRandom)

    new HttpsConnectionContext(context)
  }

  override def getPort(binding: (ActorSystem, ServerBinding)): Int = binding._2.localAddress.getPort
} 
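A small usage sketch, assuming a trivial handler that answers every request with an empty 200 response; the real interop tests would plug in a generated gRPC handler instead:

import akka.http.scaladsl.model.HttpResponse
import scala.concurrent.Future

val server  = AkkaGrpcServerScala(_ => _ => Future.successful(HttpResponse()))
val binding = server.start()                          // binds 127.0.0.1 on a random port with HTTP/2 + TLS
println(s"Listening on port ${server.getPort(binding)}")
server.stop(binding)                                  // unbinds and terminates the ActorSystem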
Example 97
Source File: SslContexts.scala    From kubernetes-client   with Apache License 2.0 5 votes vote down vote up
package com.goyeau.kubernetes.client.util
import java.io.{ByteArrayInputStream, File, FileInputStream, InputStreamReader}
import java.security.cert.{CertificateFactory, X509Certificate}
import java.security.{KeyStore, SecureRandom, Security}
import java.util.Base64

import com.goyeau.kubernetes.client.KubeConfig
import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}
import org.bouncycastle.jce.provider.BouncyCastleProvider
import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter
import org.bouncycastle.openssl.{PEMKeyPair, PEMParser}

object SslContexts {
  private val TrustStoreSystemProperty         = "javax.net.ssl.trustStore"
  private val TrustStorePasswordSystemProperty = "javax.net.ssl.trustStorePassword"
  private val KeyStoreSystemProperty           = "javax.net.ssl.keyStore"
  private val KeyStorePasswordSystemProperty   = "javax.net.ssl.keyStorePassword"

  def fromConfig(config: KubeConfig): SSLContext = {
    val sslContext = SSLContext.getInstance("TLS")
    sslContext.init(keyManagers(config), trustManagers(config), new SecureRandom)
    sslContext
  }

  private def keyManagers(config: KubeConfig) = {
    // Client certificate
    val certDataStream = config.clientCertData.map(data => new ByteArrayInputStream(Base64.getDecoder.decode(data)))
    val certFileStream = config.clientCertFile.map(new FileInputStream(_))

    // Client key
    val keyDataStream = config.clientKeyData.map(data => new ByteArrayInputStream(Base64.getDecoder.decode(data)))
    val keyFileStream = config.clientKeyFile.map(new FileInputStream(_))

    for {
      keyStream  <- keyDataStream.orElse(keyFileStream)
      certStream <- certDataStream.orElse(certFileStream)
    } yield {
      Security.addProvider(new BouncyCastleProvider())
      val pemKeyPair =
        new PEMParser(new InputStreamReader(keyStream)).readObject().asInstanceOf[PEMKeyPair] // scalafix:ok
      val privateKey = new JcaPEMKeyConverter().setProvider("BC").getPrivateKey(pemKeyPair.getPrivateKeyInfo)

      val certificateFactory = CertificateFactory.getInstance("X509")
      val certificate        = certificateFactory.generateCertificate(certStream).asInstanceOf[X509Certificate] // scalafix:ok

      defaultKeyStore.setKeyEntry(
        certificate.getSubjectX500Principal.getName,
        privateKey,
        config.clientKeyPass.fold(Array.empty[Char])(_.toCharArray),
        Array(certificate)
      )
    }

    val keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
    keyManagerFactory.init(defaultKeyStore, Array.empty)
    keyManagerFactory.getKeyManagers
  }

  private lazy val defaultKeyStore = {
    val propertyKeyStoreFile =
      Option(System.getProperty(KeyStoreSystemProperty, "")).filter(_.nonEmpty).map(new File(_))

    val keyStore = KeyStore.getInstance(KeyStore.getDefaultType)
    keyStore.load(
      propertyKeyStoreFile.map(new FileInputStream(_)).orNull,
      System.getProperty(KeyStorePasswordSystemProperty, "").toCharArray
    )
    keyStore
  }

  private def trustManagers(config: KubeConfig) = {
    val certDataStream = config.caCertData.map(data => new ByteArrayInputStream(Base64.getDecoder.decode(data)))
    val certFileStream = config.caCertFile.map(new FileInputStream(_))

    certDataStream.orElse(certFileStream).foreach { certStream =>
      val certificateFactory = CertificateFactory.getInstance("X509")
      val certificate        = certificateFactory.generateCertificate(certStream).asInstanceOf[X509Certificate] // scalafix:ok
      defaultTrustStore.setCertificateEntry(certificate.getSubjectX500Principal.getName, certificate)
    }

    val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
    trustManagerFactory.init(defaultTrustStore)
    trustManagerFactory.getTrustManagers
  }

  private lazy val defaultTrustStore = {
    val securityDirectory = s"${System.getProperty("java.home")}/lib/security"

    val propertyTrustStoreFile =
      Option(System.getProperty(TrustStoreSystemProperty, "")).filter(_.nonEmpty).map(new File(_))
    val jssecacertsFile = Option(new File(s"$securityDirectory/jssecacerts")).filter(f => f.exists && f.isFile)
    val cacertsFile     = new File(s"$securityDirectory/cacerts")

    val keyStore = KeyStore.getInstance(KeyStore.getDefaultType)
    keyStore.load(
      new FileInputStream(propertyTrustStoreFile.orElse(jssecacertsFile).getOrElse(cacertsFile)),
      System.getProperty(TrustStorePasswordSystemProperty, "changeit").toCharArray
    )
    keyStore
  }
} 
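A sketch of how the resulting SSLContext could be plugged into a plain HttpsURLConnection; constructing the KubeConfig itself is out of scope here, so it is taken as a parameter:

import java.net.URL
import javax.net.ssl.HttpsURLConnection

import com.goyeau.kubernetes.client.KubeConfig

def openApiConnection(config: KubeConfig, apiServer: URL): HttpsURLConnection = {
  val conn = apiServer.openConnection().asInstanceOf[HttpsURLConnection]
  // use the client/CA material from the kubeconfig for this connection
  conn.setSSLSocketFactory(SslContexts.fromConfig(config).getSocketFactory)
  conn
}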
Example 98
Source File: CifarLoader.scala    From SparkNet   with MIT License 5 votes vote down vote up
package loaders

import java.io.File
import java.io.FileInputStream

import scala.util.Random

import libs._


class CifarLoader(path: String) {
  // We hardcode this because these are properties of the CIFAR-10 dataset.
  val height = 32
  val width = 32
  val channels = 3
  val size = channels * height * width
  val batchSize = 10000
  val nBatches = 5
  val nData = nBatches * batchSize

  val trainImages = new Array[Array[Float]](nData)
  val trainLabels = new Array[Int](nData)

  val testImages = new Array[Array[Float]](batchSize)
  val testLabels = new Array[Int](batchSize)

  val r = new Random()
  // val perm = Vector() ++ r.shuffle(1 to (nData - 1) toIterable)
  val indices = Vector() ++ (0 to nData - 1) toIterable
  val trainPerm = Vector() ++ r.shuffle(indices)
  val testPerm = Vector() ++ ((0 to batchSize) toIterable)

  val d = new File(path)
  if (!d.exists) {
    throw new Exception("The path " + path + " does not exist.")
  }
  if (!d.isDirectory) {
    throw new Exception("The path " + path + " is not a directory.")
  }
  val cifar10Files = List("data_batch_1.bin", "data_batch_2.bin", "data_batch_3.bin", "data_batch_4.bin", "data_batch_5.bin", "test_batch.bin")
  for (filename <- cifar10Files) {
    if (!d.list.contains(filename)) {
      throw new Exception("The directory " + path + " does not contain all of the Cifar10 data. Please run `bash $SPARKNET_HOME/data/cifar10/get_cifar10.sh` to obtain the Cifar10 data.")
    }
  }

  val fullFileList = d.listFiles.filter(_.getName().split('.').last == "bin").toList
  val testFile = fullFileList.find(x => x.getName().split('/').last == "test_batch.bin").head
  val fileList = fullFileList diff List(testFile)

  for (i <- 0 to nBatches - 1) {
    readBatch(fileList(i), i, trainImages, trainLabels, trainPerm)
  }
  readBatch(testFile, 0, testImages, testLabels, testPerm)

  val meanImage = new Array[Float](size)

  for (i <- 0 to nData - 1) {
    for (j <- 0 to size - 1) {
      meanImage(j) += trainImages(i)(j).toFloat / nData
    }
  }

  def readBatch(file: File, batch: Int, images: Array[Array[Float]], labels: Array[Int], perm: Vector[Int]) {
    val buffer = new Array[Byte](1 + size)
    val inputStream = new FileInputStream(file)

    var i = 0
    var nRead = inputStream.read(buffer)

    while(nRead != -1) {
      assert(i < batchSize)
      labels(perm(batch * batchSize + i)) = (buffer(0) & 0xFF) // convert to unsigned
      images(perm(batch * batchSize + i)) = new Array[Float](size)
      var j = 0
      while (j < size) {
        // we access buffer(j + 1) because the 0th position holds the label
        images(perm(batch * batchSize + i))(j) = buffer(j + 1) & 0xFF
        j += 1
      }
      nRead = inputStream.read(buffer)
      i += 1
    }
  }
} 
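Loading the dataset is then a matter of pointing the loader at the directory containing the CIFAR-10 binaries (the path below is hypothetical):

val cifar = new CifarLoader("/data/cifar10")
println(s"training images: ${cifar.trainImages.length}")   // 50000 (5 batches of 10000)
println(s"test images:     ${cifar.testImages.length}")    // 10000
println(s"mean image size: ${cifar.meanImage.length}")     // 3 * 32 * 32 = 3072 floats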
Example 99
Source File: package.scala    From doddle-model   with Apache License 2.0 5 votes vote down vote up
package io.picnicml

import java.io.{FileInputStream, ObjectInputStream}

import io.picnicml.doddlemodel.typeclasses.Estimator

package object doddlemodel {

  lazy val maxNumThreads: Int =
    System.getProperty("maxNumThreads", Runtime.getRuntime.availableProcessors.toString).toInt

  def loadEstimator[A: Estimator](filePath: String): A = {
    val inputStream = new ObjectInputStream(new FileInputStream(filePath))
    val instance = inputStream.readObject.asInstanceOf[A]
    inputStream.close()
    instance
  }
} 
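A usage sketch, assuming the estimator was previously serialized with an ObjectOutputStream and that the concrete model type (a doddle-model linear regression is used here as an assumption) has an Estimator instance in scope:

import io.picnicml.doddlemodel.loadEstimator
import io.picnicml.doddlemodel.linear.LinearRegression

val model: LinearRegression = loadEstimator[LinearRegression]("/models/price-model.ser")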
Example 100
Source File: TikaParser.scala    From duometer   with Apache License 2.0 5 votes vote down vote up
package com.pawelmandera.io.tika

import java.io.{ File, FileInputStream }

import org.apache.tika.metadata.Metadata
import org.apache.tika.sax.BodyContentHandler
import org.apache.tika.parser.AutoDetectParser



object TikaParser {
  def text(file: File): String = {
    val is = new FileInputStream(file)
    val handler = new BodyContentHandler()
    val metadata = new Metadata()
    val parser = new AutoDetectParser()
    parser.parse(is, handler, metadata)
    handler.toString
  }
} 
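Extracting the text of a document then looks like this; the file path is hypothetical, and Tika auto-detects the format:

import java.io.File

val body: String = TikaParser.text(new File("/tmp/report.pdf"))
println(body.take(500))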
Example 101
Source File: CommitLogFile.scala    From NSDb   with Apache License 2.0 5 votes vote down vote up
package io.radicalbit.nsdb.commit_log
import java.io.{File, FileInputStream}

import io.radicalbit.nsdb.commit_log.CommitLogWriterActor._

import scala.collection.mutable.ListBuffer

object CommitLogFile {

  
  implicit class CommitLogFile(val file: File) {

    def checkPendingEntries(implicit serializer: CommitLogSerializer): (List[Int], List[Int]) = {
      val pending: ListBuffer[Int]       = ListBuffer.empty[Int]
      val closedEntries: ListBuffer[Int] = ListBuffer.empty[Int]

      val contents    = new Array[Byte](5000)
      val inputStream = new FileInputStream(file)
      var r           = inputStream.read(contents)
      while (r != -1) {

        val rawEntry = serializer.deserialize(contents)

        rawEntry match {
          case Some(e: ReceivedEntry)    => if (!pending.contains(e.id)) pending += e.id
          case Some(e: AccumulatedEntry) => if (!pending.contains(e.id)) pending += e.id
          case Some(e: PersistedEntry)   => if (!closedEntries.contains(e.id)) closedEntries += e.id
          case Some(e: RejectedEntry)    => if (!closedEntries.contains(e.id)) closedEntries += e.id
          case Some(_)                   =>
          case None                      =>
        }
        r = inputStream.read(contents)
      }

      inputStream.close()
      (pending.toList, closedEntries.toList)
    }

  }
} 
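With the implicit class in scope, checking a commit-log file for entries that were received or accumulated but never persisted or rejected is a short helper; the serializer instance (some CommitLogSerializer implementation) is assumed to be provided by the caller:

import java.io.File
import io.radicalbit.nsdb.commit_log.CommitLogFile._

def pendingEntryIds(logFile: File)(implicit serializer: CommitLogSerializer): List[Int] = {
  val (pending, closed) = logFile.checkPendingEntries
  pending.filterNot(closed.contains)   // keep only ids that never reached a terminal state
}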
Example 102
Source File: MetricsConfig.scala    From BigDatalog   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.metrics

import java.io.{FileInputStream, InputStream}
import java.util.Properties

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.util.matching.Regex

import org.apache.spark.util.Utils
import org.apache.spark.{Logging, SparkConf}

private[spark] class MetricsConfig(conf: SparkConf) extends Logging {

  private val DEFAULT_PREFIX = "*"
  private val INSTANCE_REGEX = "^(\\*|[a-zA-Z]+)\\.(.+)".r
  private val DEFAULT_METRICS_CONF_FILENAME = "metrics.properties"

  private[metrics] val properties = new Properties()
  private[metrics] var propertyCategories: mutable.HashMap[String, Properties] = null

  private def setDefaultProperties(prop: Properties) {
    prop.setProperty("*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet")
    prop.setProperty("*.sink.servlet.path", "/metrics/json")
    prop.setProperty("master.sink.servlet.path", "/metrics/master/json")
    prop.setProperty("applications.sink.servlet.path", "/metrics/applications/json")
  }

  def initialize() {
    // Add default properties in case there's no properties file
    setDefaultProperties(properties)

    loadPropertiesFromFile(conf.getOption("spark.metrics.conf"))

    // Also look for the properties in provided Spark configuration
    val prefix = "spark.metrics.conf."
    conf.getAll.foreach {
      case (k, v) if k.startsWith(prefix) =>
        properties.setProperty(k.substring(prefix.length()), v)
      case _ =>
    }

    propertyCategories = subProperties(properties, INSTANCE_REGEX)
    if (propertyCategories.contains(DEFAULT_PREFIX)) {
      val defaultProperty = propertyCategories(DEFAULT_PREFIX).asScala
      for((inst, prop) <- propertyCategories if (inst != DEFAULT_PREFIX);
          (k, v) <- defaultProperty if (prop.get(k) == null)) {
        prop.put(k, v)
      }
    }
  }

  def subProperties(prop: Properties, regex: Regex): mutable.HashMap[String, Properties] = {
    val subProperties = new mutable.HashMap[String, Properties]
    prop.asScala.foreach { kv =>
      if (regex.findPrefixOf(kv._1.toString).isDefined) {
        val regex(prefix, suffix) = kv._1.toString
        subProperties.getOrElseUpdate(prefix, new Properties).setProperty(suffix, kv._2.toString)
      }
    }
    subProperties
  }

  def getInstance(inst: String): Properties = {
    propertyCategories.get(inst) match {
      case Some(s) => s
      case None => propertyCategories.getOrElse(DEFAULT_PREFIX, new Properties)
    }
  }

  
  private[this] def loadPropertiesFromFile(path: Option[String]): Unit = {
    var is: InputStream = null
    try {
      is = path match {
        case Some(f) => new FileInputStream(f)
        case None => Utils.getSparkClassLoader.getResourceAsStream(DEFAULT_METRICS_CONF_FILENAME)
      }

      if (is != null) {
        properties.load(is)
      }
    } catch {
      case e: Exception =>
        val file = path.getOrElse(DEFAULT_METRICS_CONF_FILENAME)
        logError(s"Error loading configuration file $file", e)
    } finally {
      if (is != null) {
        is.close()
      }
    }
  }

} 
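A sketch of the resulting behavior: instance-specific keys and the "*" defaults end up merged in the Properties returned by getInstance. Note that MetricsConfig is private[spark], so this only compiles from code inside an org.apache.spark package:

import org.apache.spark.SparkConf
import org.apache.spark.metrics.MetricsConfig

val sparkConf = new SparkConf()
  .set("spark.metrics.conf.master.sink.csv.period", "10")

val metricsConf = new MetricsConfig(sparkConf)
metricsConf.initialize()

val masterProps = metricsConf.getInstance("master")
masterProps.getProperty("sink.csv.period")     // "10" (instance-specific, taken from the SparkConf)
masterProps.getProperty("sink.servlet.path")   // "/metrics/master/json" (from setDefaultProperties)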
Example 103
Source File: convertOutput.scala    From SparkAndMPIFactorizations   with MIT License 5 votes vote down vote up
package org.apache.spark.mllib.linalg.distributed
import breeze.linalg.{DenseMatrix, DenseVector}
import java.io.{DataInputStream, FileInputStream, FileWriter, File}

object ConvertDump { 

  type DM = DenseMatrix[Double]
  type DDV = DenseVector[Double]
  type DIV = DenseVector[Int]

  def loadDoubleVector( inf: DataInputStream) : DDV = {
    val len = inf.readInt()
    val v = DenseVector.zeros[Double](len)
    for (i <- 0 until len) {
      v(i) = inf.readDouble()
    }
    v
  }
  
  def loadIntVector( inf: DataInputStream) : DIV = {
    val len = inf.readInt()
    val v = DenseVector.zeros[Int](len)
    for (i <- 0 until len) {
      v(i) = inf.readInt()
    }
    v
  }

  def loadMatrix( inf: DataInputStream) : DM = {
    val (r,c) = Tuple2(inf.readInt(), inf.readInt())
    val m = DenseMatrix.zeros[Double](r,c)
    for (i <- 0 until r; j <- 0 until c) {
      m(i,j) = inf.readDouble()
    }
    m 
  }

  def loadDump(infname: String) : Tuple4[DM, DM, DDV, DDV] = {

    val inf = new DataInputStream( new FileInputStream(infname))

    val eofsU = loadMatrix(inf)
    val eofsV = loadMatrix(inf)
    val evals = loadDoubleVector(inf)
    val mean = loadDoubleVector(inf)

    inf.close()
    (eofsU, eofsV, evals, mean)
  }

  def writeDoubleMatrix(mat: DM, fn: String) = {
    val writer = new FileWriter(new File(fn))
    writer.write("%%MatrixMarket matrix coordinate real general\n")
    writer.write(s"${mat.rows} ${mat.cols} ${mat.rows*mat.cols}\n")
    for(i <- 0 until mat.rows) {
      for(j <- 0 until mat.cols) {
        writer.write(f"${i+1} ${j+1} ${mat(i, j)}%f\n")
      }
    }
    writer.close
  }

  def writeIntVector(vec: DIV, fn: String) = {
    val mat = vec.asDenseMatrix
    val writer = new FileWriter(new File(fn))
    writer.write("%%MatrixMarket matrix coordinate real general\n")
    writer.write(s"${mat.rows} ${mat.cols} ${mat.rows*mat.cols}\n")
    for(i <- 0 until mat.rows) {
      for(j <- 0 until mat.cols) {
        writer.write(s"${i+1} ${j+1} ${mat(i, j)}\n")
      }
    }
    writer.close
  }

  def main(args: Array[String]) {
    val (eofsU, eofsV, eofsS, mean) = loadDump(args(0))
    writeDoubleMatrix(eofsU, s"${args(1)}/colEOFs")
    writeDoubleMatrix(eofsV, s"${args(1)}/rowEOFs")
    writeDoubleMatrix(eofsS.asDenseMatrix, s"${args(1)}/evalEOFs")
    writeDoubleMatrix(mean.asDenseMatrix, s"${args(1)}/rowMeans")
  }
} 
Example 104
Source File: GimelProperties.scala    From gimel   with Apache License 2.0 5 votes vote down vote up
package com.paypal.gimel.common.conf

import java.io.{File, FileInputStream}
import java.util.{Calendar, Properties}

import scala.collection.JavaConverters._
import scala.collection.mutable

import com.paypal.gimel.logger.Logger

class GimelProperties(userProps: Map[String, String] = Map[String, String]()) {
  // Get Logger
  val logger = Logger()
  logger.info(s"Initiating --> ${this.getClass.getName}")
  // Get Properties
  val props: mutable.Map[String, String] = getProps
  val runTagUUID: String = java.util.UUID.randomUUID.toString
  val startTimeMS: String = Calendar.getInstance().getTimeInMillis.toString
  val tagToAdd: String = s"_$startTimeMS"

  private def getConf(key: String): String = {
    userProps.getOrElse(key, props(key))
  }

  // Kafka Properties
  val kafkaBroker: String = getConf(GimelConstants.KAFKA_BROKER_LIST)
  val kafkaConsumerCheckPointRoot: String = getConf(GimelConstants.KAFKA_CONSUMER_CHECKPOINT_PATH)
  val kafkaAvroSchemaKey: String = getConf(GimelConstants.KAFKA_CDH_SCHEMA)
  val confluentSchemaURL: String = getConf(GimelConstants.CONFLUENT_SCHEMA_URL)
  val hbaseNameSpace: String = getConf(GimelConstants.HBASE_NAMESPACE)
  val zkHostAndPort: String = getConf(GimelConstants.ZOOKEEPER_LIST)
  val zkPrefix: String = getConf(GimelConstants.ZOOKEEPER_STATE)
  val esHost: String = getConf(GimelConstants.ES_NODE)
  val esPort: String = getConf(GimelConstants.ES_PORT)

  // Kerberos
  val keytab: String = getConf(GimelConstants.KEY_TAB)
  val principal: String = getConf(GimelConstants.KEY_TAB_PRINCIPAL)
  val cluster: String = getConf(GimelConstants.CLUSTER)
  val dataSetDeploymentClusters: String = getConf(GimelConstants.DEPLOYMENT_CLUSTERS)


  val defaultESCluster: String = props(GimelConstants.ES_POLLING_STORAGES)

  def hiveURL(cluster: String): String = {
    userProps.getOrElse(s"gimel.hive.$cluster.url", props(s"gimel.hive.$cluster.url"))
  }

  def esURL(escluster: String): String = {
    val alternateConfig = props(s"gimel.es.${defaultESCluster}.url")
    userProps.getOrElse(GimelConstants.ES_URL_WITH_PORT, alternateConfig)
  }

  
  def apply(params: Map[String, String]): GimelProperties = new GimelProperties(params)

} 
Example 105
Source File: ArtifactHdfsSaver.scala    From marvin-engine-executor   with Apache License 2.0 5 votes vote down vote up
package org.marvin.artifact.manager

import java.io.{File, FileInputStream}

import akka.Done
import akka.actor.{Actor, ActorLogging}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.marvin.artifact.manager.ArtifactSaver.{SaveToLocal, SaveToRemote}
import org.marvin.model.EngineMetadata

class ArtifactHdfsSaver(metadata: EngineMetadata) extends Actor with ActorLogging {
  var conf: Configuration = _

  override def preStart() = {
    log.info(s"${this.getClass().getCanonicalName} actor initialized...")
    conf = new Configuration()

    if (sys.env.get("HADOOP_CONF_DIR").isDefined) {
      val confFiles: List[File] = getListOfFiles(sys.env.get("HADOOP_CONF_DIR").mkString)

      for(file <- confFiles){
        log.info(s"Loading ${file.getAbsolutePath} file to hdfs client configuration ..")
        conf.addResource(new FileInputStream(file))
      }
    }

    conf.set("fs.defaultFS", metadata.hdfsHost)
  }

  def generatePaths(artifactName: String, protocol: String): Map[String, Path] = {
    Map(
      "localPath" -> new Path(s"${metadata.artifactsLocalPath}/${metadata.name}/$artifactName"),
      "remotePath" -> new Path(s"${metadata.artifactsRemotePath}/${metadata.name}/${metadata.version}/$artifactName/$protocol")
    )
  }

  def getListOfFiles(path: String): List[File] = {
    val dir = new File(path)
    val extensions = List("xml")
    dir.listFiles.filter(_.isFile).toList.filter { file =>
      extensions.exists(file.getName.endsWith(_))
    }
  }

  def validatePath(path: Path, isRemote: Boolean, fs: FileSystem): Boolean = {
    if (isRemote) {
      fs.exists(path)
    } else {
      new java.io.File(path.toString).exists
    }
  }

  override def receive: Receive = {
    case SaveToLocal(artifactName, protocol) =>
      log.info("Receive message and starting to working...")
      val fs = FileSystem.get(conf)
      val uris = generatePaths(artifactName, protocol)

      if (validatePath(uris("remotePath"), true, fs)) {
        log.info(s"Copying files from ${uris("remotePath")} to ${uris("localPath")}")
        fs.copyToLocalFile(false, uris("remotePath"), uris("localPath"), false)
        fs.close()
        log.info(s"File ${uris("localPath")} saved!")
      }
      else {
        log.error(s"Invalid protocol: ${protocol}, save process canceled!")
      }

      sender ! Done

    case SaveToRemote(artifactName, protocol) =>
      log.info("Receive message and starting to working...")
      val fs = FileSystem.get(conf)
      val uris = generatePaths(artifactName, protocol)

      if (validatePath(uris("localPath"), false, fs)) {
        log.info(s"Copying files from ${uris("localPath")} to ${uris("remotePath")}")
        fs.copyFromLocalFile(uris("localPath"), uris("remotePath"))
        fs.close()
        log.info(s"File ${uris("localPath")} saved!")
      }
      else {
        log.error(s"Invalid protocol: ${protocol}, save process canceled!")
      }

      sender ! Done

    case _ =>
      log.warning("Received a bad format message...")
  }
} 
Example 106
Source File: DownloadableFile.scala    From polynote   with Apache License 2.0 5 votes vote down vote up
package polynote.kernel.util

import java.io.{File, FileInputStream, InputStream}
import java.net.{HttpURLConnection, URI}
import java.util.ServiceLoader

import scala.collection.JavaConverters._
import cats.effect.IO
import zio.{RIO, ZIO}
import zio.blocking.{Blocking, effectBlocking}

trait DownloadableFile {
  def openStream: IO[InputStream]
  def size: IO[Long]
}

trait DownloadableFileProvider {
  def getFile(uri: URI): Option[DownloadableFile] = provide.lift(uri)

  def provide: PartialFunction[URI, DownloadableFile]

  def protocols: Seq[String]

  object Supported {
    def unapply(arg: URI): Option[URI] = {
      Option(arg.getScheme).flatMap(scheme => protocols.find(_ == scheme)).map(_ => arg)
    }
  }
}

object DownloadableFileProvider {
  private lazy val unsafeLoad = ServiceLoader.load(classOf[DownloadableFileProvider]).iterator.asScala.toList

  def isSupported(uri: URI): RIO[Blocking, Boolean] = effectBlocking(unsafeLoad).map { providers =>
    Option(uri.getScheme).exists(providers.flatMap(_.protocols).contains)
  }

  def getFile(uri: URI): ZIO[Blocking, Throwable, DownloadableFile] = {
    effectBlocking(unsafeLoad).map {
      providers =>
        for {
          scheme <- Option(uri.getScheme)
          provider <- providers.find(_.protocols.contains(scheme))
          file <- provider.getFile(uri)
        } yield file
    }.someOrFail(new Exception(s"Unable to find provider for uri $uri"))
  }
}

class HttpFileProvider extends DownloadableFileProvider {
  override def protocols: Seq[String] = Seq("http", "https")

  override def provide: PartialFunction[URI, DownloadableFile] = {
    case Supported(uri) => HTTPFile(uri)
  }
}

case class HTTPFile(uri: URI) extends DownloadableFile {
  override def openStream: IO[InputStream] = IO(uri.toURL.openStream())

  override def size: IO[Long] = IO(uri.toURL.openConnection().asInstanceOf[HttpURLConnection]).bracket { conn =>
    IO {
      conn.setRequestMethod("HEAD")
      conn.getContentLengthLong
    }
  } { conn => IO(conn.disconnect())}
}

class LocalFileProvider extends DownloadableFileProvider {
  override def protocols: Seq[String] = Seq("file")

  override def provide: PartialFunction[URI, DownloadableFile] = {
    case Supported(uri) => LocalFile(uri)
  }
}

case class LocalFile(uri: URI) extends DownloadableFile {
  lazy val file = new File(uri)
  override def openStream: IO[InputStream] = IO(new FileInputStream(file))

  override def size: IO[Long] = IO.pure(file.length())
} 
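A small sketch of the local-file path through this API; size and openStream are cats-effect IO values, so they are run explicitly here, assuming a cats-effect 2 style IO where unsafeRunSync() is available (the file path is hypothetical):

import java.net.URI

val file = LocalFile(new URI("file:///tmp/data.bin"))
val length: Long = file.size.unsafeRunSync()
val stream = file.openStream.unsafeRunSync()
try println(s"$length bytes available") finally stream.close()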
Example 107
Source File: PelagiosRDFCrosswalk.scala    From recogito2   with Apache License 2.0 5 votes vote down vote up
package services.entity.builtin.importer.crosswalks.rdf

import java.io.{File, FileInputStream, InputStream}
import services.entity._
import org.joda.time.{DateTime, DateTimeZone}
import org.pelagios.Scalagios
import org.pelagios.api.PeriodOfTime

object PelagiosRDFCrosswalk {

  private def toLinks(uris: Seq[String], linkType: LinkType.Value) =
    uris.map(uri => Link(EntityRecord.normalizeURI(uri), linkType))

  private def convertPeriodOfTime(period: PeriodOfTime): TemporalBounds = {
    val startDate = period.start
    val endDate = period.end.getOrElse(startDate)
    TemporalBounds(
      new DateTime(startDate).withZone(DateTimeZone.UTC),
      new DateTime(endDate).withZone(DateTimeZone.UTC))
  }

  def fromRDF(filename: String, identifier: String): InputStream => Seq[EntityRecord] = {
    def convertPlace(place: org.pelagios.api.gazetteer.Place) =
      EntityRecord(
        EntityRecord.normalizeURI(place.uri),
        identifier,
        DateTime.now().withZone(DateTimeZone.UTC),
        None,
        place.label,
        place.descriptions.map(l => Description(l.chars, l.lang)),
        place.names.map(l => Name(l.chars, l.lang)),
        place.location.map(_.geometry),
        place.location.map(_.pointLocation),
        None, // country code
        place.temporalCoverage.map(convertPeriodOfTime(_)),
        place.category.map(category => Seq(category.toString)).getOrElse(Seq.empty[String]),
        None, // priority
        {
          toLinks(place.closeMatches, LinkType.CLOSE_MATCH) ++
          toLinks(place.exactMatches, LinkType.EXACT_MATCH)
        })

    // Return crosswalk function
    { stream: InputStream =>
      Scalagios.readPlaces(stream, filename).map(convertPlace).toSeq }
  }

  def readFile(file: File, identifier: String): Seq[EntityRecord] =
    fromRDF(file.getName, identifier)(new FileInputStream(file))

} 
Example 108
Source File: LandingController.scala    From recogito2   with Apache License 2.0 5 votes vote down vote up
package controllers.landing

import com.mohiva.play.silhouette.api.Silhouette
import controllers.{ HasConfig, HasUserService, HasVisitLogging, HasPrettyPrintJSON, Security }
import java.io.FileInputStream
import javax.inject.{ Inject, Singleton }
import java.net.URI
import org.webjars.play.WebJarsUtil
import play.api.Configuration
import play.api.i18n.I18nSupport
import play.api.libs.json.{Json, JsObject}
import play.api.mvc.{Action, AbstractController, ControllerComponents}
import scala.concurrent.ExecutionContext
import services.annotation.AnnotationService
import services.contribution.ContributionService
import services.document.DocumentService
import services.user.UserService
import services.visit.VisitService

@Singleton
class LandingController @Inject() (
    val components: ControllerComponents,
    val config: Configuration,
    val annotations: AnnotationService,
    val contributions: ContributionService,
    val documents: DocumentService,
    val users: UserService,
    val silhouette: Silhouette[Security.Env],
    implicit val ctx: ExecutionContext,
    implicit val visits: VisitService,
    implicit val webjars: WebJarsUtil
) extends AbstractController(components) with HasConfig with HasUserService with HasVisitLogging with HasPrettyPrintJSON with I18nSupport {

  def index = silhouette.UserAwareAction { implicit request =>
    // Temporary hack only
    request.queryString.get("lang").flatMap(_.headOption) match {
      case Some(lang) =>
        Redirect(routes.LandingController.index).withLang(play.api.i18n.Lang(lang))
      case None =>
        request.identity match {
          case Some(user) =>
            Redirect(controllers.my.routes.WorkspaceController.workspace(user.username))

          case None =>
            logPageView()
            Ok(views.html.landing.index())
        }
    }
  }

  def getStats() = silhouette.UnsecuredAction.async { implicit request =>
    val fAnnotations = annotations.countTotal()
    val fEdits = contributions.countLast24hrs()
    val fUsers = users.countUsers()

    val f = for {
      annotations <- fAnnotations
      edits <- fEdits
      users <- fUsers
    } yield (annotations, edits, users)

    f.map { case (annotations, edits, users) =>
      jsonOk(Json.obj(
        "annotations" -> annotations,
        "edits" -> edits,
        "users" -> users
      ))
    }
  }

  def sitemap() = Action.async { implicit request =>
    documents.listOwnersWithPublicDocuments().map { users =>
      val baseURL = routes.LandingController.index().absoluteURL()
      val sitemap = users.map(user => s"${baseURL}${user}").mkString("\n")
      Ok(sitemap).as("text/plain")
    }
  }

  def robots() = Action { implicit request =>
    val sitemapURL = routes.LandingController.sitemap().absoluteURL()
    Ok(s"SITEMAP: ${sitemapURL}").as("text/plain")
  }

  def swaggerConfig() = Action { implicit request => 
    val json = Json.parse(new FileInputStream("conf/swagger.json"))
    val baseURL = new URI(routes.LandingController.index.absoluteURL)
    val host = if (baseURL.getPort == -1) baseURL.getHost else s"${baseURL.getHost}:${baseURL.getPort}"
    jsonOk(json.as[JsObject] ++ Json.obj("host" -> host))
  }

} 
Example 109
Source File: DumpLoader.scala    From recogito2   with Apache License 2.0 5 votes vote down vote up
package controllers.admin.authorities

import java.io.{InputStream, File, FileInputStream}
import java.util.zip.GZIPInputStream
import play.api.Logger
import scala.concurrent.{Await, ExecutionContext}
import scala.concurrent.duration._
import services.entity.EntityRecord
import services.entity.builtin.importer.EntityImporter

class DumpLoader {
  
  private def getStream(file: File, filename: String) =
    if (filename.endsWith(".gz"))
      new GZIPInputStream(new FileInputStream(file))
    else
      new FileInputStream(file)
  
  def importDump(file: File, filename: String, crosswalk: InputStream => Seq[EntityRecord], importer: EntityImporter)(implicit ctx: ExecutionContext) = {
    val records = crosswalk(getStream(file, filename))
    Logger.info("Importing " + records.size + " records")
    Await.result(importer.importRecords(records), 60.minute)   
  }
  
} 
Example 110
Source File: BackupWriter.scala    From recogito2   with Apache License 2.0 5 votes vote down vote up
package controllers.document

import controllers.HasConfig
import java.io.{File, FileInputStream, FileOutputStream, BufferedInputStream, ByteArrayInputStream, InputStream, PrintWriter}
import java.nio.file.Paths
import java.math.BigInteger
import java.security.{MessageDigest, DigestInputStream}
import java.util.UUID
import java.util.zip.{ZipEntry, ZipOutputStream}
import services.HasDate
import services.annotation.{Annotation, AnnotationService}
import services.document.{ExtendedDocumentMetadata, DocumentToJSON}
import services.generated.tables.records.{DocumentRecord, DocumentFilepartRecord}
import play.api.libs.json.Json
import play.api.libs.Files.TemporaryFileCreator
import scala.concurrent.{ExecutionContext, Future}
import storage.TempDir
import storage.uploads.Uploads

trait BackupWriter extends HasBackupValidation { self: HasConfig =>
  
  // Frontend annotation format
  import services.annotation.FrontendAnnotation._
  
  private val BUFFER_SIZE = 2048
  
  private def writeToZip(inputStream: InputStream, filename: String, zip: ZipOutputStream) = {
    zip.putNextEntry(new ZipEntry(filename))
     
    val md = MessageDigest.getInstance(ALGORITHM)    
    val in = new DigestInputStream(new BufferedInputStream(inputStream), md)

    var data = new Array[Byte](BUFFER_SIZE)
    var count: Int = 0

    while ({ count = in.read(data, 0, BUFFER_SIZE); count } > -1) {
      zip.write(data, 0, count)
    }

    in.close()
    zip.closeEntry()
    
    new BigInteger(1, md.digest()).toString(16)
  }
  
  def createBackup(doc: ExtendedDocumentMetadata)(implicit ctx: ExecutionContext, uploads: Uploads, 
      annotations: AnnotationService, tmpFile: TemporaryFileCreator): Future[File] = {
    
    def getFileAsStream(owner: String, documentId: String, filename: String) = {
      val dir = uploads.getDocumentDir(owner, documentId).get // Fail hard if the dir doesn't exist
      new FileInputStream(new File(dir, filename))
    }
    
    def getManifestAsStream() = {
      val manifest = "Recogito-Version: 2.0.1-alpha"
      new ByteArrayInputStream(manifest.getBytes)
    }
    
    def getMetadataAsStream(doc: ExtendedDocumentMetadata) = {
      
      // DocumentRecord JSON serialization
      import services.document.DocumentToJSON._
      
      val json = Json.prettyPrint(Json.toJson((doc.document, doc.fileparts)))
      new ByteArrayInputStream(json.getBytes)
    }
    
    def getAnnotationsAsStream(docId: String, annotations: Seq[Annotation], parts: Seq[DocumentFilepartRecord]): InputStream = {
      val path = Paths.get(TempDir.get()(self.config), s"${docId}_annotations.json")
      val tmp = tmpFile.create(path)
      val writer = new PrintWriter(path.toFile)
      annotations.foreach(a => writer.println(Json.stringify(Json.toJson(a))))
      writer.close()
      new FileInputStream(path.toFile)
    }
    
    Future {
      tmpFile.create(Paths.get(TempDir.get()(self.config), s"${doc.id}.zip"))
    } flatMap { zipFile =>
      val zipStream = new ZipOutputStream(new FileOutputStream(zipFile.path.toFile))

      writeToZip(getManifestAsStream(), "manifest", zipStream)
      val metadataHash = writeToZip(getMetadataAsStream(doc), "metadata.json", zipStream)

      val fileHashes = doc.fileparts.map { part =>
        writeToZip(getFileAsStream(doc.ownerName, doc.id, part.getFile), "parts" + File.separator + part.getFile, zipStream)
      }

      annotations.findByDocId(doc.id).map { annotations =>
        val annotationsHash = writeToZip(getAnnotationsAsStream(doc.id, annotations.map(_._1), doc.fileparts), "annotations.jsonl", zipStream)
        
        val signature = computeSignature(metadataHash, fileHashes, annotationsHash)
        writeToZip(new ByteArrayInputStream(signature.getBytes), "signature", zipStream)
        
        zipStream.close()
        zipFile.path.toFile
      }
    }
  }
  
} 
Example 111
Source File: PelagiosRDFCrosswalkSpec.scala    From recogito2   with Apache License 2.0 5 votes vote down vote up
package services.entity.builtin.importer.crosswalks.rdf

import com.vividsolutions.jts.geom.{Coordinate, GeometryFactory}
import java.io.{File, FileInputStream}
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
import services.entity.{Description, Name, TemporalBounds}

@RunWith(classOf[JUnitRunner])
class PelagiosRDFCrosswalkSpec extends Specification {

  private val GAZETTEER_RDF = new File("test/resources/services/entity/gazetteer_sample_pleiades.ttl")

  "The Pelagios Gazetter RDF crosswalk" should {

    val records = PelagiosRDFCrosswalk.readFile(GAZETTEER_RDF, "http://pleiades.stoa.org")

    "properly load all gazetteer records from RDF" in {
      records.size must equalTo(5)

      val expectedTitles = Seq(
        "Col. Barcino",
        "Mun. Vindobona",
        "Vindobona",
        "Thessalonica",
        "Lancaster")
      records.map(_.title) must containAllOf(expectedTitles)
    }

    "normalize URIs correctly" in {
      val expectedURIs = Seq(
        "http://pleiades.stoa.org/places/246343",
        "http://pleiades.stoa.org/places/128460",
        "http://pleiades.stoa.org/places/128537",
        "http://pleiades.stoa.org/places/491741",
        "http://pleiades.stoa.org/places/89222")
      records.map(_.uri) must containAllOf(expectedURIs)
    }

    "properly import all properties of the test record" in {
      val testRecord = records.find(_.uri == "http://pleiades.stoa.org/places/128460").get

      testRecord.sourceAuthority must equalTo ("gazetteer_sample_pleiades")

      testRecord.title must equalTo("Mun. Vindobona")

      testRecord.subjects.size must equalTo(1)
      testRecord.subjects.head must equalTo("SETTLEMENT")

      testRecord.descriptions.size must equalTo(1)
      testRecord.descriptions.head must equalTo(Description("An ancient place, cited: BAtlas 13 B4 Mun. Vindobona"))

      val expectedNames = Seq(
          Name("Mun. Vindobona"),
          Name("Wien"),
          Name("Wien/Vienna AUS"))
      testRecord.names.size must equalTo(3)
      testRecord.names must containAllOf(expectedNames)

      val coord = new Coordinate(16.373064, 48.208982)
      val point = new GeometryFactory().createPoint(coord)
      testRecord.geometry must equalTo(Some(point))
      testRecord.representativePoint must equalTo(Some(coord))

      testRecord.temporalBounds must equalTo(Some(TemporalBounds.fromYears(-20, 630)))
      testRecord.links.size must equalTo(0)
    }

  }

} 
Example 112
Source File: Analyze.scala    From Mastering-Spark-for-Data-Science   with MIT License 5 votes vote down vote up
package controllers

import java.io.{File, FileInputStream}
import java.util.UUID

import com.typesafe.config.ConfigFactory
import io.gzet.recommender.Audio
import models.Songs
import play.api.Logger
import play.api.mvc._
import svc.{AnalyzerSvc, CassandraDao, SparkSvc}

object Analyze extends Controller {

  val config = ConfigFactory.load()
  val minTime = config.getInt("gzet.min.time")
  val maxTime = config.getInt("gzet.max.time")
  val cassandraHost = config.getString("cassandra.host")
  val cassandraPort = config.getInt("cassandra.port")
  val sampleSize = config.getDouble("gzet.sample.size")
  val minMatch = config.getDouble("gzet.min.match")

  val dao = new CassandraDao(cassandraHost, cassandraPort)
  val analyzer = new AnalyzerSvc()
  val spark = new SparkSvc()

  def index = Action { implicit request =>
    val songs = Songs(dao.getSongs)
    Logger.info(s"Database is currently ${songs.songs.size} songs long")
    Ok(views.html.analyze("Select a wav file to analyze")(songs))
  }

  def submit = Action(parse.multipartFormData) { request =>
    val songs = Songs(dao.getSongs)
    Logger.info(s"Database is currently ${songs.songs.size} songs long")
    if(songs.songs.isEmpty) {
      Redirect(routes.Analyze.index()).flashing("warning" -> s"Library is currently empty. Please index new records")
    } else {
      request.body.file("song").map { upload =>
        val fileName = upload.filename
        Logger.info(s"Processing file $fileName")
        val file = new File(s"/tmp/${UUID.randomUUID()}")
        upload.ref.moveTo(file)
        try {
          val song = process(file)
          if(song.isEmpty) {
            Redirect(routes.Analyze.index()).flashing("warning" -> s"Could not match any record for [$fileName]")
          } else {
            val songName = song.get
            Logger.info(s"Found song [$songName]")
            Redirect(routes.Analyze.index()).flashing("success" -> songName)
          }
        } catch {
          case e: Exception =>
            Redirect(routes.Analyze.index()).flashing("error" -> e.getMessage)
        }
      }.getOrElse {
        Redirect(routes.Analyze.index()).flashing("error" -> "Missing file")
      }
    }
  }

  def process(file: File) = {
    val is = new FileInputStream(file)
    val audio = Audio.processSong(is, minTime, maxTime)
    Logger.info(audio.toString)
    file.delete()
    analyzer.analyze(audio)
  }

} 
Example 113
Source File: AudioLibrary.scala    From Mastering-Spark-for-Data-Science   with MIT License 5 votes vote down vote up
package io.gzet.recommender

import java.io.{File, FileInputStream}

import org.apache.spark.SparkContext

object AudioLibrary {

  def read(library: String, sc: SparkContext, minTime: Long = 0, maxTime: Long = 20000) = {
    sc binaryFiles library filter { case (file, stream) =>
      file.endsWith(".wav")
    } map { case (file, stream) =>
      val fileName = new File(file).getName
      val audio = Audio.processSong(stream.open(), minTime, maxTime)
      (fileName, audio)
    }
  }

  def readFile(song: String, minTime: Long = 0, maxTime: Long = Long.MaxValue) = {
    val is = new FileInputStream(song)
    Audio.processSong(is, minTime, maxTime)
  }

} 
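Reading a single track from the local filesystem then looks like this (the path and time window are hypothetical):

val audio = AudioLibrary.readFile("/data/songs/track01.wav", minTime = 0, maxTime = 20000)
println(audio)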
Example 114
Source File: GenerationContext.scala    From scalingua   with Apache License 2.0 5 votes vote down vote up
package ru.makkarpov.scalingua.plugin

import java.io.{BufferedReader, DataInputStream, FileInputStream, InputStreamReader}
import java.nio.charset.StandardCharsets

import ru.makkarpov.scalingua.LanguageId
import sbt._

object GenerationContext {
  val HashMarker = "## Hash: ## "
  val ScalaHashPrefix = s"// $HashMarker"
}

case class GenerationContext(pkg: String, implicitCtx: Option[String], lang: LanguageId, hasTags: Boolean,
                             src: File, target: File, log: Logger)
{
  val srcHash = src.hashString

  def mergeContext(ctx: Option[String]): Option[String] = (implicitCtx, ctx) match {
    case (None,    None)    => None
    case (Some(x), None)    => Some(x)
    case (None,    Some(y)) => Some(y)
    case (Some(x), Some(y)) => Some(x + ":" + y)
  }

  def filePrefix = "/" + pkg.replace('.', '/') + (if (pkg.nonEmpty) "/" else "")

  def checkBinaryHash: Boolean = target.exists() && {
    val storedHash = {
      val is = new DataInputStream(new FileInputStream(target))
      try is.readUTF()
      catch {
        case t: Throwable =>
          t.printStackTrace()
          ""
      } finally is.close()
    }

    srcHash == storedHash
  }

  def checkTextHash: Boolean = target.exists() && {
    import GenerationContext.HashMarker

    val storedHash = {
      val rd = new BufferedReader(new InputStreamReader(new FileInputStream(target), StandardCharsets.UTF_8))
      try {
        val l = rd.readLine()
        if ((l ne null) && l.contains(HashMarker)) {
          val idx = l.indexOf(HashMarker)
          l.substring(idx + HashMarker.length)
        } else ""
      } catch {
        case t: Throwable =>
          t.printStackTrace()
          ""
      } finally rd.close()
    }

    srcHash == storedHash
  }
} 
Example 115
Source File: TaggedParser.scala    From scalingua   with Apache License 2.0 5 votes vote down vote up
package ru.makkarpov.scalingua.extract

import java.io.{File, FileInputStream, InputStreamReader}
import java.nio.charset.StandardCharsets

import com.grack.nanojson.{JsonObject, JsonParser, JsonParserException}
import ru.makkarpov.scalingua.pofile.Message.{Plural, Singular}
import ru.makkarpov.scalingua.pofile._
import ru.makkarpov.scalingua.Compat.CollectionConverters._

object TaggedParser {
  val TaggedFileName = "tagged-messages.json"

  case class TaggedMessage(tag: String, msg: String, plural: Option[String], comment: Seq[String]) {
    def toMessage: Message = {
      val header = MessageHeader(comment, Nil, MessageLocation(TaggedFileName) :: Nil, MessageFlag.empty, Some(tag))

      plural match {
        case None => Singular(header, None, MultipartString(msg), MultipartString.empty)
        case Some(p) => Plural(header, None, MultipartString(msg), MultipartString(p),
          Seq(MultipartString.empty, MultipartString.empty))
      }
    }
  }

  
  def parse(f: File): Seq[TaggedMessage] = {
    val ret = Vector.newBuilder[TaggedMessage]

    try {
      val obj = {
        val r = new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8)
        try JsonParser.`object`().from(r) finally r.close()
      }

      for (k <- obj.keySet().asScala) obj.get(k) match {
        case v: JsonObject =>
          if (!v.has("message"))
            throw TaggedParseException(s"Object with key '$k' has no 'message' field")

          if (!v.isString("message"))
            throw TaggedParseException(s"Object with key '$k' has non-string 'message' field")

          val msg = v.getString("message")

          val plural =
            if (v.has("plural")) {
              if (!v.isString("plural"))
                throw TaggedParseException(s"Object with key '$k' has non-string 'plural' field")
              Some(v.getString("plural"))
            } else None

          val comments =
            if (v.has("comments")) {
              if (v.isString("comments")) v.getString("comments") :: Nil
              else v.getArray("comments").asScala.toList.map(_.asInstanceOf[String])
            } else Nil

          ret += TaggedMessage(k, msg, plural, comments)

        case v: String =>
          ret += TaggedMessage(k, v, None, Nil)
      }
    } catch {
      case e: JsonParserException =>
        throw new TaggedParseException(s"Tagged JSON syntax error at ${f.getCanonicalPath}:${e.getLinePosition}:${e.getCharPosition}", e)
    }

    ret.result()
  }
} 
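Parsing a tagged-messages file and turning each entry into a PO message then reduces to the following (the resource path is hypothetical):

import java.io.File

val messages = TaggedParser
  .parse(new File("src/main/resources/tagged-messages.json"))
  .map(_.toMessage)

println(s"Parsed ${messages.size} tagged messages")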
Example 116
Source File: Credentials.scala    From sbt-coursier   with Apache License 2.0 5 votes vote down vote up
package coursier

import java.io.{File, FileInputStream}
import java.util.Properties

import lmcoursier.definitions.Authentication

// actually deprecated (all public ways of creating that are)
sealed abstract class Credentials extends Product with Serializable {
  def user: String
  def password: String

  def authentication: Authentication =
    Authentication(user, password)
}

object Credentials {

  private final case class Direct(user: String, password: String) extends Credentials {
    override def toString = s"Direct($user, ******)"
  }

  private final case class FromFile(file: File) extends Credentials {

    private lazy val props = {
      val p = new Properties()
      p.load(new FileInputStream(file))
      p
    }

    private def findKey(keys: Seq[String]) = keys
      .iterator
      .map(props.getProperty)
      .filter(_ != null)
      .toStream
      .headOption
      .getOrElse {
        throw new NoSuchElementException(s"${keys.head} key in $file")
      }

    lazy val user: String = findKey(FromFile.fileUserKeys)
    lazy val password: String = findKey(FromFile.filePasswordKeys)
  }

  private object FromFile {
    // from sbt.Credentials
    private val fileUserKeys = Seq("user", "user.name", "username")
    private val filePasswordKeys = Seq("password", "pwd", "pass", "passwd")
  }


  @deprecated("Use coursierExtraCredentials rather than coursierCredentials", "1.1.0-M14")
  def apply(user: String, password: String): Credentials =
    Direct(user, password)

  @deprecated("Use coursierExtraCredentials rather than coursierCredentials", "1.1.0-M14")
  def apply(file: File): Credentials =
    FromFile(file)

} 
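A short sketch of both factories; note that they are deprecated in favour of coursierExtraCredentials, and the file below is assumed to follow the sbt credential-file convention (user/password keys):

import java.io.File

val direct   = Credentials("maven-user", "s3cr3t")
val fromFile = Credentials(new File(sys.props("user.home"), ".ivy2/.credentials"))

val auth = fromFile.authentication   // lazily reads the properties file on first access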
Example 117
Source File: LagomOpenApiGenerator.scala    From sbt-lagom-descriptor-generator   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.spec.sbt

import java.io.FileInputStream

import com.lightbend.lagom.spec.LagomGeneratorTypes.GeneratedCode
import com.lightbend.lagom.spec.sbt.LagomOpenApiPlugin.autoImport._
import com.lightbend.lagom.spec.{ LagomGeneratorTypes, LagomGenerators, ResourceUtils }
import sbt.Keys._
import sbt._

object LagomOpenApiGenerator {

  
  def dslDiscoveryTask: Def.Initialize[Task[String]] = Def.task {
    // TODO: not sure this is the best approach but
    val deps = allDependencies.value
    if (deps.exists(_.name.contains("lagom-javadsl"))) {
      "java"
    } else if (deps.exists(_.name.contains("lagom-scaladsl"))) {
      "scala"
    } else {
      throw new IllegalArgumentException(s"Can't determine the target language.")
    }
  }

  def lagomOpenAPIGenerateDescriptorTask(): Def.Initialize[Task[Seq[File]]] = Def.task {
    val lang = dslDiscoveryTask.value
    val packageName = organization.value
    val specFiles: Seq[File] = (sources in lagomOpenAPIGenerateDescriptor).value
    val outputDirectory: File = (target in lagomOpenAPIGenerateDescriptor).value

    specFiles.flatMap { specFile =>
      val targetDir = new File(outputDirectory, lang)
      val serviceName = extractServiceName(specFile.getName)
      val output = generate(lang, specFile, packageName, serviceName)

      val generatedFiles: Seq[File] =
        writeFile(targetDir, output.descriptor) +: output.models.map { case (_, genCode) => writeFile(targetDir, genCode) }.toSeq
      generatedFiles
    }
  }

  private def generate(lang: String, specFile: File, packageName: String, serviceName: String): LagomGeneratorTypes.Output = {
    // Decide formatter.
    lang match {
      case "java" =>
        LagomGenerators.openApiV2ToLagomJava(new FileInputStream(specFile.getAbsoluteFile), packageName, serviceName)
      case _ =>
        LagomGenerators.openApiV2ToLagomScala(new FileInputStream(specFile.getAbsoluteFile), packageName, serviceName)
    }
  }

  private def writeFile(folder: File, content: GeneratedCode): File =
    ResourceUtils.writeFile(folder, content.relativeFile, content.fileContents)

  private def extractServiceName(filename: String): String = {
    filename.reverse.dropWhile(_ != '.').drop(1).reverse
  }

} 
Example 118
Source File: ReThinkConnection.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.rethink

import java.io.{BufferedInputStream, FileInputStream}

import com.datamountaineer.streamreactor.connect.rethink.config.ReThinkConfigConstants
import com.rethinkdb.RethinkDB
import com.rethinkdb.net.Connection
import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.common.config.AbstractConfig
import org.apache.kafka.connect.errors.ConnectException


object ReThinkConnection extends StrictLogging {
  def apply(r: RethinkDB, config: AbstractConfig): Connection = {

    val host = config.getString(ReThinkConfigConstants.RETHINK_HOST)
    val port = config.getInt(ReThinkConfigConstants.RETHINK_PORT)
    val username = config.getString(ReThinkConfigConstants.USERNAME)
    val password = config.getPassword(ReThinkConfigConstants.PASSWORD).value()
    val certFile = config.getString(ReThinkConfigConstants.CERT_FILE)
    val authKey = config.getPassword(ReThinkConfigConstants.AUTH_KEY)

    //java driver also catches this
    if (username.nonEmpty && certFile.nonEmpty) {
      throw new ConnectException("Username and Certificate file can not be used together.")
    }

    if ((certFile.nonEmpty && config.getPassword(ReThinkConfigConstants.AUTH_KEY).value().isEmpty)
      || certFile.isEmpty && config.getPassword(ReThinkConfigConstants.AUTH_KEY).value().nonEmpty
    ) {
      throw new ConnectException("Both the certificate file and authentication key must be set for secure TLS connections.")
    }

    val builder = r.connection()
      .hostname(host)
      .port(port)

    if (!username.isEmpty) {
      logger.info("Adding username/password credentials to connection")
      builder.user(username, password)
    }

    if (!certFile.isEmpty) {
      logger.info(s"Using certificate file ${certFile} for TLS connection, overriding any SSLContext")
      val is = new BufferedInputStream(new FileInputStream(certFile))
      builder.certFile(is)
    }

    if (!authKey.value().isEmpty) {
      logger.info("Set authorization key")
      builder.authKey(authKey.value())
    }

    builder.connect()
  }
} 
Example 119
Source File: DTLSConnectionFn.scala    From stream-reactor   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.coap.connection

import java.io.FileInputStream
import java.net.{ConnectException, InetAddress, InetSocketAddress, URI}
import java.security.cert.Certificate
import java.security.{KeyStore, PrivateKey}

import com.datamountaineer.streamreactor.connect.coap.configs.{CoapConstants, CoapSetting}
import com.typesafe.scalalogging.StrictLogging
import org.eclipse.californium.core.CoapClient
import org.eclipse.californium.core.coap.CoAP
import org.eclipse.californium.core.network.CoapEndpoint
import org.eclipse.californium.core.network.config.NetworkConfig
import org.eclipse.californium.scandium.DTLSConnector
import org.eclipse.californium.scandium.config.DtlsConnectorConfig
import org.eclipse.californium.scandium.dtls.cipher.CipherSuite
import org.eclipse.californium.scandium.dtls.pskstore.InMemoryPskStore


object DTLSConnectionFn extends StrictLogging {

  def discoverServer(address: String, uri: URI): URI = {
    val client = new CoapClient(s"${uri.getScheme}://$address:${uri.getPort.toString}/.well-known/core")
    client.useNONs()
    val response = client.get()

    if (response != null) {
      logger.info(s"Discovered Server ${response.advanced().getSource.toString}.")
      new URI(uri.getScheme,
        uri.getUserInfo,
        response.advanced().getSource.getHostName,
        response.advanced().getSourcePort,
        uri.getPath,
        uri.getQuery,
        uri.getFragment)
    } else {
      logger.error(s"Unable to find any servers on local network with multicast address $address.")
      throw new ConnectException(s"Unable to find any servers on local network with multicast address $address.")
    }
  }
} 
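
A minimal, hypothetical call to the discovery helper above; the multicast address and target URI below are placeholders, not values taken from the original project:

  // Sketch only: probe the CoAP multicast group and derive a URI that points at the discovered server.
  val discovered = DTLSConnectionFn.discoverServer("224.0.1.187", new URI("coap://localhost:5683/sensors"))
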
Example 120
Source File: FileIO.scala    From korolev   with Apache License 2.0 5 votes vote down vote up
package korolev.effect.io

import java.io.{BufferedReader, FileInputStream, FileOutputStream, FileReader}
import java.nio.file.Path

import korolev.effect.syntax._
import korolev.effect.{Effect, Stream}

object FileIO {

  def readBytes[F[_]: Effect](path: Path): F[LazyBytes[F]] = {
    val inputStream = new FileInputStream(path.toFile)
    LazyBytes.fromInputStream(inputStream)
  }

  def readLines[F[_]: Effect](path: Path): F[Stream[F, String]] = {
    Stream.unfoldResource[F, BufferedReader, Unit, String](
      default = (),
      create = Effect[F].delay(new BufferedReader(new FileReader(path.toFile))),
      loop = (reader, _) => Effect[F].delay {
        ((), Option(reader.readLine()))
      }
    )
  }

  
  def write[F[_]: Effect](path: Path, append: Boolean = false): Stream[F, Array[Byte]] => F[Unit] = { stream =>
    val outputStream = new FileOutputStream(path.toFile, append)
    def aux(): F[Unit] = {
      stream.pull().flatMap {
        case Some(chunk) => Effect[F]
          .delay(outputStream.write(chunk))
          .after(aux())
          .recover {
            case error =>
              outputStream.close()
              throw error
          }
        case None =>
          Effect[F].delay(outputStream.close())
      }
    }
    aux()
  }
} 
Example 121
Source File: TransformerBenchmark.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package com.truecar.mleap.spark.benchmark

import java.io.{FileInputStream, File}

import ml.bundle.fs.DirectoryBundle
import com.truecar.mleap.runtime.LocalLeapFrame
import com.truecar.mleap.runtime.transformer.Transformer
import com.truecar.mleap.serialization.ml.v1.MlJsonSerializer
import org.scalameter.api._
import org.scalameter.picklers.Implicits._
import spray.json._
import com.truecar.mleap.serialization.mleap.v1.MleapJsonSupport._


object TransformerBenchmark extends Bench.ForkedTime {
  lazy override val executor = {
    SeparateJvmsExecutor(
      Executor.Warmer.Zero,
      Aggregator.min[Double],
      new Measurer.Default)
  }

  val mlSerializer = MlJsonSerializer
  val classLoader = getClass.getClassLoader
  val regressionFile = new File("/tmp/transformer.ml")
  val frameFile = new File("/tmp/frame.json")

  val bundleReader = DirectoryBundle(regressionFile)
  val regression = mlSerializer.deserializeWithClass(bundleReader).asInstanceOf[Transformer]

  val lines = scala.io.Source.fromFile(frameFile).mkString
  val frame = lines.parseJson.convertTo[LocalLeapFrame]

  val ranges = for {
    size <- Gen.range("size")(1000, 10000, 1000)
  } yield 0 until size

  measure method "transform" in {
    using(ranges) in {
      size =>
        size.foreach {
          _ => regression.transform(frame)
        }
    }
  }
} 
Example 122
Source File: SparkTransformerBenchmark.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package com.truecar.mleap.spark.benchmark

import java.io.{FileInputStream, File}

import com.esotericsoftware.kryo.io.Input
import com.truecar.mleap.runtime.LocalLeapFrame
import com.truecar.mleap.spark.benchmark.util.SparkSerializer
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.ml.Transformer
import org.scalameter.Bench
import scala.collection.JavaConverters._
import org.scalameter.api._
import org.scalameter.picklers.Implicits._
import org.apache.log4j.Logger
import org.apache.log4j.Level
import com.truecar.mleap.spark.MleapSparkSupport._
import spray.json._
import com.truecar.mleap.serialization.mleap.v1.MleapJsonSupport._


object SparkTransformerBenchmark extends Bench.ForkedTime {
  lazy override val executor = {
    SeparateJvmsExecutor(
      Executor.Warmer.Zero,
      Aggregator.min[Double],
      new Measurer.Default)
  }

  val classLoader = getClass.getClassLoader
  val regressionFile = new File("/tmp/spark.transformer.kryo")
  val frameFile = new File("/tmp/frame.json")

  val inputStream = new FileInputStream(regressionFile)
  val input = new Input(inputStream)

  val regression: Transformer = SparkSerializer().read(input)
  val lines = scala.io.Source.fromFile(frameFile).mkString
  val frame = lines.parseJson.convertTo[LocalLeapFrame]

  Logger.getLogger("org").setLevel(Level.OFF)
  Logger.getLogger("akka").setLevel(Level.OFF)

  val sparkConf = new SparkConf()
    .setAppName("Spark Transformer Benchmark")
    .setMaster("local[1]")
  val sc = new SparkContext(sparkConf)
  val sqlContext = new SQLContext(sc)

  val rdd = frame.dataset.data.map(a => Row(a.toSeq: _*)).toList.asJava
  val schema = frame.schema.toSpark
  val sparkFrame = sqlContext.createDataFrame(rdd, schema)

  val ranges = for {
    size <- Gen.range("size")(1000, 10000, 1000)
  } yield 0 until size

  measure method "transform" in {
    using(ranges) in {
      size =>
        size.foreach {
          _ => regression.transform(sparkFrame).head
        }
    }
  }

//  sc.stop()
} 
Example 123
Source File: Config.scala    From zipkin-mesos-framework   with Apache License 2.0 5 votes vote down vote up
package net.elodina.mesos.zipkin

import java.io.{File, FileInputStream}
import java.net.URI
import java.util.Properties

import net.elodina.mesos.zipkin.utils.{BindAddress, Period}

object Config {
  val DEFAULT_FILE = new File("zipkin-mesos.properties")

  var debug: Boolean = false
  var genTraces: Boolean = false
  var storage: String = "file:zipkin-mesos.json"

  var master: Option[String] = None
  var principal: Option[String] = None
  var secret: Option[String] = None
  var user: Option[String] = None

  var frameworkName: String = "zipkin"
  var frameworkRole: String = "*"
  var frameworkTimeout: Period = new Period("30d")

  var log: Option[File] = None
  var api: Option[String] = None
  var bindAddress: Option[BindAddress] = None

  def apiPort: Int = {
    val port = new URI(getApi).getPort
    if (port == -1) 80 else port
  }

  def replaceApiPort(port: Int): Unit = {
    val prev: URI = new URI(getApi)
    api = Some("" + new URI(
      prev.getScheme, prev.getUserInfo,
      prev.getHost, port,
      prev.getPath, prev.getQuery, prev.getFragment
    ))
  }

  def getApi: String = {
    api.getOrElse(throw new Error("api not initialized"))
  }

  def getMaster: String = {
    master.getOrElse(throw new Error("master not initialized"))
  }

  def getZk: String = {
    master.getOrElse(throw new Error("zookeeper not initialized"))
  }

  private[zipkin] def loadFromFile(file: File): Unit = {
    val props: Properties = new Properties()
    val stream: FileInputStream = new FileInputStream(file)

    props.load(stream)
    stream.close()

    if (props.containsKey("debug")) debug = java.lang.Boolean.valueOf(props.getProperty("debug"))
    if (props.containsKey("genTraces")) genTraces = java.lang.Boolean.valueOf(props.getProperty("genTraces"))
    if (props.containsKey("storage")) storage = props.getProperty("storage")

    if (props.containsKey("master")) master = Some(props.getProperty("master"))
    if (props.containsKey("user")) user = Some(props.getProperty("user"))
    if (props.containsKey("principal")) principal = Some(props.getProperty("principal"))
    if (props.containsKey("secret")) secret = Some(props.getProperty("secret"))

    if (props.containsKey("framework-name")) frameworkName = props.getProperty("framework-name")
    if (props.containsKey("framework-role")) frameworkRole = props.getProperty("framework-role")
    if (props.containsKey("framework-timeout")) frameworkTimeout = new Period(props.getProperty("framework-timeout"))

    if (props.containsKey("log")) log = Some(new File(props.getProperty("log")))
    if (props.containsKey("api")) api = Some(props.getProperty("api"))
    if (props.containsKey("bind-address")) bindAddress = Some(new BindAddress(props.getProperty("bind-address")))
  }

  override def toString: String = {
    s"""
       |debug: $debug, storage: $storage
        |mesos: master=$master, user=${if (user.isEmpty || user.get.isEmpty) "<default>" else user}
        |principal=${principal.getOrElse("<none>")}, secret=${if (secret.isDefined) "*****" else "<none>"}
        |framework: name=$frameworkName, role=$frameworkRole, timeout=$frameworkTimeout
        |api: $api, bind-address: ${bindAddress.getOrElse("<all>")}, genTraces: $genTraces
    """.stripMargin.trim
  }
} 
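
A brief usage sketch, assuming the caller lives inside the net.elodina.mesos.zipkin package (loadFromFile is package-private) and that a zipkin-mesos.properties file exists in the working directory:

  // Sketch only: load overrides from the default properties file, then report the effective settings.
  if (Config.DEFAULT_FILE.exists) Config.loadFromFile(Config.DEFAULT_FILE)
  println(Config) // toString masks the secret and falls back to the built-in defaults
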
Example 124
Source File: GBDTModel.scala    From sona   with Apache License 2.0 5 votes vote down vote up
package com.tencent.angel.sona.tree.gbdt

import java.io.{FileInputStream, FileOutputStream, ObjectInputStream, ObjectOutputStream}

import com.tencent.angel.sona.tree.gbdt.tree.{GBDTParam, GBTNode}
import com.tencent.angel.sona.tree.regression.RegTree
import org.apache.spark.ml.linalg.Vector

import scala.collection.mutable.ArrayBuffer

object GBDTModel {
  type GBTTree = RegTree[GBTNode]

  def save(model: GBDTModel, path: String): Unit = {
    val oos = new ObjectOutputStream(new FileOutputStream(path))
    oos.writeObject(model)
    oos.close()
  }

  def load(path: String): GBDTModel = {
    val ois = new ObjectInputStream(new FileInputStream(path))
    ois.readObject().asInstanceOf[GBDTModel]
  }
}

import GBDTModel._
class GBDTModel(val param: GBDTParam) extends Serializable {
  private var forest: ArrayBuffer[GBTTree] = ArrayBuffer[GBTTree]()
  private var weights: ArrayBuffer[Float] = ArrayBuffer[Float]()

  def predict(instance: Vector): Array[Float] = {
    if (param.isRegression || param.numClass == 2) {
      var pred = 0.0f
      for (i <- forest.indices)
        pred += weights(i) * forest(i).predictBinary(instance)
      Array(pred)
    } else if (param.multiTree) {
      val preds = Array.ofDim[Float](param.numClass)
      for (i <- forest.indices)
        preds(i % param.numClass) += weights(i) *
          forest(i).predictBinary(instance)
      preds
    } else {
      val preds = Array.ofDim[Float](param.numClass)
      for (i <- forest.indices) {
        val p = forest(i).predictMulti(instance)
        val w = weights(i)
        for (k <- 0 until param.numClass)
          preds(k) += w * p(k)
      }
      preds
    }
  }

  def predict(instances: Array[Vector]): Array[Array[Float]] = {
    instances.map(predict)
  }

  def get(treeId: Int): GBTTree = forest(treeId)

  def add(tree: GBTTree, weight: Float): Unit = {
    forest += tree
    weights += weight
  }

  def keepFirstTrees(num: Int): Unit = {
    forest = forest.slice(0, num)
    weights = weights.slice(0, num)
  }

  def numTree: Int = forest.size
} 
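
A hedged round-trip sketch for the save/load helpers above; the path and the model/feature values are illustrative only:

  // Sketch only: persist a trained model with Java serialization and score one instance after reloading.
  val path = "/tmp/gbdt.model"             // hypothetical location
  GBDTModel.save(model, path)              // model: a previously trained GBDTModel (assumed)
  val restored = GBDTModel.load(path)
  val scores = restored.predict(features)  // features: org.apache.spark.ml.linalg.Vector (assumed)
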
Example 125
Source File: VwSparseMultilabelPredictorTest.scala    From aloha   with MIT License 5 votes vote down vote up
package com.eharmony.aloha.models.vw.jni.multilabel

import java.io.{ByteArrayOutputStream, File, FileInputStream}

import com.eharmony.aloha.ModelSerializationTestHelper
import com.eharmony.aloha.io.sources.{Base64StringSource, ExternalSource, ModelSource}
import org.apache.commons.codec.binary.Base64
import org.apache.commons.io.IOUtils
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.BlockJUnit4ClassRunner
import vowpalWabbit.learner.{VWActionScoresLearner, VWLearners}


@RunWith(classOf[BlockJUnit4ClassRunner])
class VwSparseMultilabelPredictorTest extends ModelSerializationTestHelper {
  import VwSparseMultilabelPredictorTest._

  @Test def testSerializability(): Unit = {
    val predictor = getPredictor(getModelSource(), 3)
    val ds = serializeDeserializeRoundTrip(predictor)
    assertEquals(predictor, ds)
    assertEquals(predictor.vwParams(), ds.vwParams())
    assertNotNull(ds.vwModel)
  }

  @Test def testVwParameters(): Unit = {
    val numLabelsInTrainingSet = 3
    val predictor = getPredictor(getModelSource(), numLabelsInTrainingSet)

    predictor.vwParams() match {
      case Data(vwBinFilePath, ringSize) =>
        checkVwBinFile(vwBinFilePath)
        checkVwRingSize(numLabelsInTrainingSet, ringSize.toInt)
      case ps => fail(s"Unexpected VW parameters format.  Found string: $ps")
    }
  }
}

object VwSparseMultilabelPredictorTest {
  private val Data = """\s*-i\s+(\S+)\s+--ring_size\s+(\d+)\s+--testonly\s+--quiet""".r

  private def getModelSource(): ModelSource = {
    val f = File.createTempFile("i_dont", "care")
    f.deleteOnExit()
    val learner = VWLearners.create[VWActionScoresLearner](s"--quiet --csoaa_ldf mc --csoaa_rank -f ${f.getCanonicalPath}")
    learner.close()
    val baos = new ByteArrayOutputStream()
    IOUtils.copy(new FileInputStream(f), baos)
    val src = Base64StringSource(Base64.encodeBase64URLSafeString(baos.toByteArray))
    ExternalSource(src.localVfs)
  }

  private def getPredictor(modelSrc: ModelSource, numLabelsInTrainingSet: Int) =
    VwSparseMultilabelPredictor[Any](modelSrc, Nil, Nil, numLabelsInTrainingSet)

  private def checkVwBinFile(vwBinFilePath: String): Unit = {
    val vwBinFile = new File(vwBinFilePath)
    assertTrue("VW binary file should have been written to disk", vwBinFile.exists())
    vwBinFile.deleteOnExit()
  }

  private def checkVwRingSize(numLabelsInTrainingSet: Int, ringSize: Int): Unit = {
    assertEquals(
      "vw --ring_size parameter is incorrect:",
      numLabelsInTrainingSet + VwSparseMultilabelPredictor.AddlVwRingSize,
      ringSize.toInt
    )
  }
} 
Example 126
Source File: FileHash.scala    From PackUpdate   with Apache License 2.0 5 votes vote down vote up
package at.chaosfield.packupdate.common

import java.io.{File, FileInputStream}

import org.apache.commons.codec.binary.Hex
import org.apache.commons.codec.digest.DigestUtils

class FileHash(data: Array[Byte]) {

  def this(data: String) = this(Hex.decodeHex(data))

  def hex: String = Hex.encodeHexString(data)
  def binary: Array[Byte] = data

  def canEqual(other: Any): Boolean = other match {
    case _: FileHash | _: String | _: Array[Byte] => true
    case _ => false
  }

  override def equals(o: Any): Boolean = this.canEqual(o) && (o match {
    case other: FileHash => data sameElements other.binary
    case other: String => data sameElements Hex.decodeHex(other)
    case other: Array[Byte] => data sameElements other
    case _ => false
  })

  override def toString: String = hex
  // Content-based hash so hashCode stays consistent with equals, which compares the digest bytes.
  override def hashCode(): Int = java.util.Arrays.hashCode(data)
}

object FileHash {
  final val Invalid = new FileHash(new Array[Byte](20))

  def forFile(file: File): FileHash =
    new FileHash(DigestUtils.sha256(new FileInputStream(file)))
} 
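
A short usage sketch, assuming an arbitrary file on disk; note that equals accepts other FileHash instances, hex strings, and raw byte arrays:

  // Sketch only: hash a file and compare the digest in its different representations.
  val hash = FileHash.forFile(new java.io.File("mods/example.jar")) // hypothetical path
  println(hash.hex)           // lowercase hex of the digest
  assert(hash == hash.hex)    // string comparison goes through Hex.decodeHex
  assert(hash == hash.binary) // byte-array comparison uses sameElements
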
Example 127
Source File: MinstDatasetReader.scala    From zen   with Apache License 2.0 5 votes vote down vote up
package com.github.cloudml.zen.ml.util

import java.io.{Closeable, DataInputStream, FileInputStream, IOException}
import java.util.zip.GZIPInputStream

import org.apache.spark.mllib.linalg.{DenseVector => SDV, Vector => SV}

case class MinstItem(label: Int, data: Array[Int]) {
  def binaryVector: SV = {
    new SDV(data.map { i =>
      if (i > 30) {
        1D
      } else {
        0D
      }
    })
  }
}

class MinstDatasetReader(labelsFile: String, imagesFile: String)
  extends java.util.Iterator[MinstItem] with Closeable with Logging {

  val labelsBuf: DataInputStream = new DataInputStream(new GZIPInputStream(
    new FileInputStream(labelsFile)))
  var magic = labelsBuf.readInt()
  val labelCount = labelsBuf.readInt()
  logInfo(s"Labels magic=$magic count= $labelCount")

  val imagesBuf: DataInputStream = new DataInputStream(new GZIPInputStream(
    new FileInputStream(imagesFile)))
  magic = imagesBuf.readInt()
  val imageCount = imagesBuf.readInt()
  val rows = imagesBuf.readInt()
  val cols = imagesBuf.readInt()
  logInfo(s"Images magic=$magic count=$imageCount rows=$rows cols=$cols")
  assert(imageCount == labelCount)

  var current = 0

  override def next(): MinstItem = {
    try {
      val data = new Array[Int](rows * cols)
      for (i <- 0 until data.length) {
        data(i) = imagesBuf.readUnsignedByte()
      }
      val label = labelsBuf.readUnsignedByte()
      MinstItem(label, data)
    } catch {
      case e: IOException =>
        current = imageCount
        throw e
    }
    finally {
      current += 1
    }
  }

  override def hasNext = current < imageCount

  override def close(): Unit = {
    imagesBuf.close()
    labelsBuf.close()
  }

  override def remove(): Unit = {
    throw new UnsupportedOperationException("remove")
  }
} 
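
A hypothetical read loop over the standard gzipped IDX files; the file names are placeholders:

  // Sketch only: stream the MNIST training set and consume each labelled image.
  val reader = new MinstDatasetReader("train-labels-idx1-ubyte.gz", "train-images-idx3-ubyte.gz")
  try {
    while (reader.hasNext) {
      val item = reader.next()
      // item.label is the digit 0-9; item.binaryVector is the thresholded 28x28 pixel vector
    }
  } finally reader.close()
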
Example 128
Source File: IOCommon.scala    From Swallow   with Apache License 2.0 5 votes vote down vote up
package com.intel.hibench.sparkbench.common

import java.io.{File, FileInputStream, IOException, InputStreamReader}
import java.util.Properties

import org.apache.hadoop.io.compress.CompressionCodec
import org.apache.hadoop.io.{NullWritable, Text}
import org.apache.hadoop.mapred.SequenceFileOutputFormat
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkContext, SparkException}

import scala.collection.JavaConversions._
import scala.collection.mutable.HashMap
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag

class IOCommon(val sc:SparkContext) {
   def load[T:ClassTag:TypeTag](filename:String, force_format:Option[String]=None) = {
     val input_format = force_format.getOrElse(
       IOCommon.getProperty("sparkbench.inputformat").getOrElse("Text"))

     input_format match {
       case "Text" =>
         sc.textFile(filename)

       case "Sequence" =>
         sc.sequenceFile[NullWritable, Text](filename).map(_._2.toString)

       case _ => throw new UnsupportedOperationException(s"Unknown inpout format: $input_format")
     }
   }

   def save(filename:String, data:RDD[_], prefix:String) = {
     val output_format = IOCommon.getProperty(prefix).getOrElse("Text")
     val output_format_codec =
       loadClassByName[CompressionCodec](IOCommon.getProperty(prefix + ".codec"))

     output_format match {
       case "Text" =>
         if (output_format_codec.isEmpty)  data.saveAsTextFile(filename)
         else data.saveAsTextFile(filename, output_format_codec.get)

       case "Sequence" =>
         val sequence_data = data.map(x => (NullWritable.get(), new Text(x.toString)))
         if (output_format_codec.isEmpty) {
           sequence_data.saveAsHadoopFile[SequenceFileOutputFormat[NullWritable, Text]](filename)
         } else {
           sequence_data.saveAsHadoopFile[SequenceFileOutputFormat[NullWritable, Text]](filename,
             output_format_codec.get)
         }

       case _ => throw new UnsupportedOperationException(s"Unknown output format: $output_format")
     }
   }

   def save(filename:String, data:RDD[_]):Unit = save(filename, data, "sparkbench.outputformat")

   private def loadClassByName[T](name:Option[String]) = {
     if (!name.isEmpty) Some(Class.forName(name.get)
       .newInstance.asInstanceOf[T].getClass) else None
   }

   private def callMethod[T, R](obj:T, method_name:String) =
     obj.getClass.getMethod(method_name).invoke(obj).asInstanceOf[R]
 }

object IOCommon {
   private val sparkbench_conf: HashMap[String, String] =
     getPropertiesFromFile(System.getenv("SPARKBENCH_PROPERTIES_FILES"))

   def getPropertiesFromFile(filenames: String): HashMap[String, String] = {
     val result = new HashMap[String, String]
     filenames.split(',').filter(_.stripMargin.length > 0).foreach { filename =>
       val file = new File(filename)
       require(file.exists, s"Properties file $file does not exist")
       require(file.isFile, s"Properties file $file is not a normal file")

       val inReader = new InputStreamReader(new FileInputStream(file), "UTF-8")
       try {
         val properties = new Properties()
         properties.load(inReader)
         result ++= properties.stringPropertyNames()
           .map(k => (k, properties(k).trim)).toMap
       } catch {
         case e: IOException =>
           val message = s"Failed when loading Sparkbench properties file $file"
           throw new SparkException(message, e)
       } finally {
         inReader.close()
       }
     }
     result.filter{case (key, value) => value.toLowerCase != "none"}
   }

   def getProperty(key:String):Option[String] = sparkbench_conf.get(key)

   def dumpProperties(): Unit = sparkbench_conf
       .foreach{case (key, value)=> println(s"$key\t\t$value")}
 } 
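
A brief, hedged sketch of the load/save round trip, assuming a live SparkContext named sc and that SPARKBENCH_PROPERTIES_FILES points at a valid properties file; the HDFS paths are made up:

  // Sketch only: read a text dataset and write it back out using the configured output format.
  val io = new IOCommon(sc)
  val lines = io.load[String]("hdfs:///tmp/sparkbench/input", force_format = Some("Text"))
  io.save("hdfs:///tmp/sparkbench/output", lines)
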
Example 129
Source File: Launcher.scala    From amaterasu   with Apache License 2.0 5 votes vote down vote up
package org.apache.amaterasu.leader.mesos

import java.io.FileInputStream

import org.apache.amaterasu.common.configuration.ClusterConfig
import org.apache.amaterasu.common.logging.Logging
import org.apache.amaterasu.leader.Kami
import org.apache.amaterasu.leader.mesos.schedulers.ClusterScheduler
import org.apache.mesos.{MesosSchedulerDriver, Protos}

object Launcher extends App with Logging {

  println(
    """
       Apache
           (                      )
           )\        )      )   ( /(   (   (       )        (
          ((_)(     (     ( /(  )\()  ))\  )(   ( /(  (    ))\
         )\ _ )\    )\  ' )(_))(_))/ /((_)(()\  )(_)) )\  /((_)
         (_)_\(_) _((_)) ((_) _ | |_ (_))   ((_)((_)_ ((_)(_))(
          / _ \  | '   \()/ _` ||  _|/ -_) | '_|/ _` |(_-<| || |
         /_/ \_\ |_|_|_|  \__,_| \__|\___| |_|  \__,_|/__/ \_,_|

         Durable Dataflow Cluster
         Version 0.1.0
    """
  )

  val config = ClusterConfig(new FileInputStream("scripts/amaterasu.properties"))
  val kami = Kami(Seq("https://github.com/roadan/amaterasu-job-sample.git"))

  // For multi-tenancy reasons the framework name is composed of the username (which defaults
  // to an empty string) concatenated with " - Amaterasu"
  val framework = Protos.FrameworkInfo.newBuilder()
    .setName(s"${config.user} - Amaterasu")
    .setFailoverTimeout(config.timeout)
    .setUser(config.user).build()

  log.debug(s"The framework user is ${config.user}")
  val masterAddress = s"${config.master}:${config.masterPort}"
  val scheduler = ClusterScheduler(kami, config)
  val driver = new MesosSchedulerDriver(scheduler, framework, masterAddress)

  log.debug(s"Connecting to master on: $masterAddress")
  driver.run()

} 
Example 130
Source File: JobLauncher.scala    From amaterasu   with Apache License 2.0 5 votes vote down vote up
package org.apache.amaterasu.leader.mesos

import java.io.FileInputStream
import java.nio.file.Paths

import org.apache.amaterasu.common.logging.Logging
import org.apache.amaterasu.common.configuration.ClusterConfig
import org.apache.amaterasu.leader.mesos.schedulers.JobScheduler
import org.apache.mesos.Protos.FrameworkID
import org.apache.mesos.{MesosSchedulerDriver, Protos}

case class Args(
                 repo: String = "",
                 branch: String = "master",
                 env: String = "default",
                 name: String = "amaterasu-job",
                 jobId: String = null,
                 report: String = "code",
                 home: String = ""
               )


object JobLauncher extends App with Logging {


  val parser = new scopt.OptionParser[Args]("amaterasu job") {
    head("amaterasu job", "0.2.0-incubating") //TODO: Get the version from the build

    opt[String]('r', "repo") action { (x, c) =>
      c.copy(repo = x)
    } text "The git repo containing the job"
    opt[String]('b', "branch") action { (x, c) =>
      c.copy(branch = x)
    } text "The branch to be executed (default is master)"
    opt[String]('e', "env") action { (x, c) =>
      c.copy(env = x)
    } text "The environment to be executed (test, prod, etc. values from the default env are taken if np env specified)"
    opt[String]('n', "name") action { (x, c) =>
      c.copy(name = x)
    } text "The name of the job"
    opt[String]('i', "job-id") action { (x, c) =>
      c.copy(jobId = x)
    } text "The jobId - should be passed only when resuming a job"
    opt[String]('r', "report") action { (x, c) =>
      c.copy(report = x)
    }
    opt[String]('h', "home") action { (x, c) =>
      c.copy(home = x)
    } text "The level of reporting"

  }


  parser.parse(args, Args()) match {

    case Some(arguments) =>

      val config = ClusterConfig(new FileInputStream(s"${arguments.home}/amaterasu.properties"))

      val frameworkBuilder = Protos.FrameworkInfo.newBuilder()
        .setName(s"${arguments.name} - Amaterasu Job")
        .setFailoverTimeout(config.timeout)
        .setUser(config.user)

      // TODO: test this
      val resume = arguments.jobId != null
      if (resume) {
        frameworkBuilder.setId(FrameworkID.newBuilder().setValue(arguments.jobId))
      }

      val framework = frameworkBuilder.build()

      val masterAddress = s"${config.master}:${config.masterPort}"

      val scheduler = JobScheduler(
        arguments.repo,
        arguments.branch,
        arguments.env,
        resume,
        config,
        arguments.report,
        arguments.home
      )

      val driver = new MesosSchedulerDriver(scheduler, framework, masterAddress)

      log.debug(s"Connecting to master on: $masterAddress")
      driver.run()

    case None =>
    // arguments are bad, error message will have been displayed
  }

} 
Example 131
Source File: DataLoader.scala    From amaterasu   with Apache License 2.0 5 votes vote down vote up
package org.apache.amaterasu.leader.utilities

import java.io.{File, FileInputStream}
import java.nio.file.{Files, Paths}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.amaterasu.common.configuration.ClusterConfig
import org.apache.amaterasu.common.dataobjects.{ActionData, ExecData, TaskData}
import org.apache.amaterasu.common.execution.dependencies.{Dependencies, PythonDependencies}
import org.apache.amaterasu.common.logging.Logging
import org.apache.amaterasu.common.runtime.Environment
import org.apache.mesos.protobuf.ByteString
import org.yaml.snakeyaml.Yaml

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.io.Source


object DataLoader extends Logging {

  val mapper = new ObjectMapper()
  mapper.registerModule(DefaultScalaModule)

  val ymlMapper = new ObjectMapper(new YAMLFactory())
  ymlMapper.registerModule(DefaultScalaModule)

  def getTaskData(actionData: ActionData, env: String): ByteString = {

    val srcFile = actionData.src
    val src = Source.fromFile(s"repo/src/$srcFile").mkString
    val envValue = Source.fromFile(s"repo/env/$env/job.yml").mkString

    val envData = ymlMapper.readValue(envValue, classOf[Environment])

    val data = mapper.writeValueAsBytes(TaskData(src, envData, actionData.groupId, actionData.typeId, actionData.exports))
    ByteString.copyFrom(data)

  }

  def getExecutorData(env: String, clusterConf: ClusterConfig): ByteString = {

    // loading the job configuration
    val envValue = Source.fromFile(s"repo/env/$env/job.yml").mkString //TODO: change this to YAML
    val envData = ymlMapper.readValue(envValue, classOf[Environment])
    // loading all additional configurations
    val files = new File(s"repo/env/$env/").listFiles().filter(_.isFile).filter(_.getName != "job.yml")
    val config = files.map(yamlToMap).toMap
    // loading the job's dependencies
    var depsData: Dependencies = null
    var pyDepsData: PythonDependencies = null
    if (Files.exists(Paths.get("repo/deps/jars.yml"))) {
      val depsValue = Source.fromFile(s"repo/deps/jars.yml").mkString
      depsData = ymlMapper.readValue(depsValue, classOf[Dependencies])
    }
    if (Files.exists(Paths.get("repo/deps/python.yml"))) {
      val pyDepsValue = Source.fromFile(s"repo/deps/python.yml").mkString
      pyDepsData = ymlMapper.readValue(pyDepsValue, classOf[PythonDependencies])
    }
    val data = mapper.writeValueAsBytes(ExecData(envData, depsData, pyDepsData, config))
    ByteString.copyFrom(data)
  }

  def yamlToMap(file: File): (String, Map[String, Any]) = {

    val yaml = new Yaml()
    val conf = yaml.load(new FileInputStream(file)).asInstanceOf[java.util.Map[String, Any]].asScala.toMap

    (file.getName.replace(".yml",""), conf)
  }

}

class ConfMap[String,  T <: ConfMap[String, T]] extends mutable.ListMap[String, Either[String, T]] 
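
A small hypothetical call to the YAML helper above, assuming an environment directory laid out as the loader expects (repo/env/<env>/*.yml):

  // Sketch only: parse one additional configuration file into a (name, map) pair.
  val (name, conf) = DataLoader.yamlToMap(new java.io.File("repo/env/test/spark.yml")) // hypothetical file
  // name == "spark"; conf is the YAML document as a Map[String, Any]
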
Example 132
Source File: JarHelper.scala    From cassandra-util   with Apache License 2.0 5 votes vote down vote up
package com.protectwise.testing.ccm

import java.io.{FileOutputStream, FileInputStream, BufferedInputStream, File}
import java.util.jar.{JarEntry, JarOutputStream}

object JarHelper {

  
  def createJarForPath(path: File, targetJar: File): File = {
    def add(source: File, target: JarOutputStream): Unit = {
      var in: BufferedInputStream = null
      try {
        var name = source.getPath.replace("\\", "/").drop(path.getPath.length()+1)
        if (source.isDirectory && !name.isEmpty && !name.endsWith("/")) name += "/"
        println(s"      $name")
        if (source.isDirectory) {
          if (!name.isEmpty) {
            val entry = new JarEntry(name)
            entry.setTime(source.lastModified())
            target.putNextEntry(entry)
            target.closeEntry()
          }
          source.listFiles.foreach(add(_, target))
          return
        }

        val entry = new JarEntry(name)
        entry.setTime(source.lastModified())
        target.putNextEntry(entry)
        in = new BufferedInputStream(new FileInputStream(source))

        val buffer = Array.ofDim[Byte](1024)
        var count = 0
        while (count != -1) {
          count = in.read(buffer)
          if (count >= 0) target.write(buffer, 0, count)
        }
        target.closeEntry()
      } finally {
        if (in != null) in.close()
      }
    }

    //    val manifest = new java.util.jar.Manifest()
    val target = new JarOutputStream(new FileOutputStream(targetJar))
    add(path, target)
    target.close()

    targetJar
  }
}
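
A short usage sketch, assuming a compiled classes directory exists at the (made-up) path below:

  // Sketch only: package a directory tree into a jar and report where it was written.
  val jar = JarHelper.createJarForPath(new java.io.File("target/classes"), new java.io.File("/tmp/classes.jar"))
  println(s"wrote ${jar.length()} bytes to ${jar.getPath}")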