java.nio.file.StandardCopyOption Scala Examples

The following examples show how to use java.nio.file.StandardCopyOption. They are taken from open source projects; the link above each example points to the original project and source file.
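Before the project examples, here is a minimal, self-contained sketch of the two most commonly used options, REPLACE_EXISTING and ATOMIC_MOVE (the file names are placeholders chosen for illustration):

import java.nio.file.{Files, Paths, StandardCopyOption}

object StandardCopyOptionDemo extends App {
  val source = Paths.get("source.txt")
  val target = Paths.get("target.txt")
  Files.write(source, "hello".getBytes)

  // Files.copy throws FileAlreadyExistsException unless REPLACE_EXISTING is passed
  Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING)

  // ATOMIC_MOVE renames in one step so readers never observe a partially written file;
  // it throws AtomicMoveNotSupportedException if the file system cannot guarantee atomicity
  Files.move(target, Paths.get("renamed.txt"), StandardCopyOption.ATOMIC_MOVE)
}

Most of the examples below follow exactly these two patterns: copy with REPLACE_EXISTING when unpacking resources, and write-to-temp-then-ATOMIC_MOVE when a file must never be seen half-written.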
Example 1
Source File: IDEPathHelper.scala    From keycloak-benchmark    with Apache License 2.0
import java.net.URI
import java.nio.file.{StandardCopyOption, Paths, Files, Path}

import io.gatling.core.util.PathHelper._

class Directories(
	val data: Path,
	val bodies: Path,
	val binaries: Path,
	val results: Path
)

object IDEPathHelper {
	private val uri: URI = getClass.getClassLoader.getResource("gatling.conf").toURI

	val directories: Directories = if (uri.getScheme.startsWith("jar")) {
		val testDir = System.getProperty("test.dir")
		val mainDir: Path = if (testDir != null) {
			val dir = Paths.get(testDir)
			if (dir.exists) {
				if (!dir.isDirectory) {
					throw new IllegalArgumentException(testDir + " is not a directory")
				}
				dir
			} else {
				Files.createDirectory(dir)
			}
		} else {
			Files.createTempDirectory("gatling-")
		}
		System.out.println("Using " + mainDir + " as gatling directory")
		// unpack gatling.conf
		Files.copy(getClass.getResourceAsStream("gatling.conf"), mainDir.resolve("gatling.conf"), StandardCopyOption.REPLACE_EXISTING)
		// using createDirectories to ignore existing
		val directories = new Directories(
			Files.createDirectories(mainDir.resolve("data")),
			Files.createDirectories(mainDir.resolve("bodies")),
			Files.createDirectories(mainDir.resolve("binaries")),
			Files.createDirectories(mainDir.resolve("results")))
		val simulationFile: String = Engine.simulationClass.replace('.', '/') + ".class"
		// unpack simulation
		val targetFile: Path = mainDir.resolve("binaries").resolve(simulationFile)
		Files.createDirectories(targetFile.getParent)
		Files.copy(getClass.getResourceAsStream(simulationFile), targetFile, StandardCopyOption.REPLACE_EXISTING)
		directories
	} else {
		val projectRootDir = RichPath(uri).ancestor(3)
		val mavenResourcesDirectory = projectRootDir / "src" / "test" / "resources"
		val mavenTargetDirectory = projectRootDir / "target"

		new Directories(
			mavenResourcesDirectory / "data",
			mavenResourcesDirectory / "bodies",
			mavenTargetDirectory / "test-classes",
			mavenTargetDirectory / "results")
	}
} 
Example 2
Source File: FileRepository.scala    From mleap    with Apache License 2.0
package ml.combust.mleap.executor.repository

import java.io.File
import java.net.URI
import java.nio.file.{Files, Path, StandardCopyOption}
import java.util.concurrent.Executors

import akka.actor.ActorSystem
import com.typesafe.config.{Config, ConfigFactory}
import ml.combust.mleap.executor.error.BundleException

import scala.concurrent.duration.TimeUnit
import scala.concurrent.{ExecutionContext, Future}

object FileRepositoryConfig {
  val defaults: Config = ConfigFactory.load().getConfig("ml.combust.mleap.executor.repository-defaults.file")
}

class FileRepositoryConfig(_config: Config) {
  val config: Config = _config.withFallback(FileRepositoryConfig.defaults)

  val move: Boolean = config.getBoolean("move")
  val threads: Int = config.getInt("threads")
}

class FileRepository(config: FileRepositoryConfig) extends Repository {
  private val threadPool = Executors.newFixedThreadPool(config.threads)
  implicit val diskEc: ExecutionContext = ExecutionContext.fromExecutor(threadPool)

  def this() = this(new FileRepositoryConfig(FileRepositoryConfig.defaults))

  override def downloadBundle(uri: URI): Future[Path] = Future {
    if (uri.getPath.isEmpty) {
      throw new BundleException("file path cannot be empty")
    }

    val local = new File(uri.getPath).toPath
    if (!Files.exists(local)) {
      throw new BundleException(s"file does not exist $local")
    }

    if (config.move) {
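      // hand back a disposable temporary copy (deleted on JVM exit) instead of the original file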
      val tmpFile = Files.createTempFile("mleap", ".bundle.zip")
      Files.copy(local, tmpFile, StandardCopyOption.REPLACE_EXISTING)
      tmpFile.toFile.deleteOnExit()
      tmpFile
    } else {
      local
    }
  }

  override def canHandle(uri: URI): Boolean = uri.getScheme == "file" || uri.getScheme == "jar:file"

  override def shutdown(): Unit = threadPool.shutdown()

  override def awaitTermination(timeout: Long, unit: TimeUnit): Unit = threadPool.awaitTermination(timeout, unit)
}

object FileRepositoryProvider extends RepositoryProvider {
  override def create(tConfig: Config)
                     (implicit system: ActorSystem): Repository = {
    val config = new FileRepositoryConfig(tConfig)

    new FileRepository(config)
  }
} 
Example 3
Source File: TestXgboost.scala    From mleap    with Apache License 2.0
package ml.combust.mleap.databricks.runtime.testkit

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

import ml.combust.bundle.BundleFile
import org.apache.spark.ml.bundle.SparkBundleContext
import org.apache.spark.ml.feature.{StringIndexer, VectorAssembler}
import org.apache.spark.sql.SparkSession
import com.databricks.spark.avro._
import ml.combust.mleap.spark.SparkSupport._
import ml.combust.mleap.runtime.MleapSupport._
import ml.dmlc.xgboost4j.scala.spark.XGBoostClassifier
import org.apache.spark.ml.Pipeline

class TestXgboost(session: SparkSession) extends Runnable {
  private val xgboostParams: Map[String, Any] = Map(
    "eta" -> 0.3,
    "max_depth" -> 2,
    "objective" -> "binary:logistic",
    "early_stopping_rounds" ->2,
    "num_round" -> 15,
    "nworkers" -> 2
  )

  override def run(): Unit = {
    val sqlContext = session.sqlContext

    // Create a temporary file and copy the contents of the resource avro to it
    val path = Files.createTempFile("mleap-databricks-runtime-testkit", ".avro")
    Files.copy(getClass.getClassLoader.getResource("datasources/lending_club_sample.avro").openStream(),
      path,
      StandardCopyOption.REPLACE_EXISTING)

    val sampleData = sqlContext.read.avro(path.toString)
    sampleData.show()

    val stringIndexer = new StringIndexer().
      setInputCol("fico_score_group_fnl").
      setOutputCol("fico_index")

    val featureAssembler = new VectorAssembler().
      setInputCols(Array(stringIndexer.getOutputCol, "dti", "loan_amount")).
      setOutputCol("features")

    val xgboostClassifier = new XGBoostClassifier(xgboostParams).
      setFeaturesCol("features").
      setLabelCol("approved").
      setPredictionCol("prediction")

    val pipeline = new Pipeline().setStages(Array(stringIndexer, featureAssembler, xgboostClassifier))

    val model = pipeline.fit(sampleData)

    val modelPath = Files.createTempFile("mleap-databricks-runtime-testkit", ".zip")
    Files.delete(modelPath)

    {
      println("Writing model to...", modelPath)
      implicit val sbc = SparkBundleContext.defaultContext.withDataset(model.transform(sampleData))
      val bf = BundleFile(new File(modelPath.toString))
      model.writeBundle.save(bf).get
      bf.close()
    }

    {
      val bf = BundleFile(new File(modelPath.toString))
      bf.loadMleapBundle().get
      bf.close()
    }
  }
} 
Example 4
Source File: TestSparkMl.scala    From mleap    with Apache License 2.0
package ml.combust.mleap.databricks.runtime.testkit

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

import ml.combust.bundle.BundleFile
import org.apache.spark.ml.bundle.SparkBundleContext
import org.apache.spark.ml.feature.{StringIndexer, VectorAssembler}
import org.apache.spark.sql.SparkSession
import com.databricks.spark.avro._
import ml.combust.mleap.spark.SparkSupport._
import ml.combust.mleap.runtime.MleapSupport._
import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.classification.LogisticRegression

class TestSparkMl(session: SparkSession) extends Runnable {
  override def run(): Unit = {
    val sqlContext = session.sqlContext

    // Create a temporary file and copy the contents of the resource avro to it
    val path = Files.createTempFile("mleap-databricks-runtime-testkit", ".avro")
    Files.copy(getClass.getClassLoader.getResource("datasources/lending_club_sample.avro").openStream(),
      path,
      StandardCopyOption.REPLACE_EXISTING)

    val sampleData = sqlContext.read.avro(path.toString)
    sampleData.show()

    val stringIndexer = new StringIndexer().
      setInputCol("fico_score_group_fnl").
      setOutputCol("fico_index")

    val featureAssembler = new VectorAssembler().
      setInputCols(Array(stringIndexer.getOutputCol, "dti", "loan_amount")).
      setOutputCol("features")

    val logisticRegression = new LogisticRegression().
      setFeaturesCol(featureAssembler.getOutputCol).
      setLabelCol("approved").
      setPredictionCol("prediction")

    val pipeline = new Pipeline().setStages(Array(stringIndexer, featureAssembler, logisticRegression))

    val model = pipeline.fit(sampleData)

    val modelPath = Files.createTempFile("mleap-databricks-runtime-testkit", ".zip")
    Files.delete(modelPath)

    // Save the model
    {
      println("Writing model to...", modelPath)
      implicit val sbc = SparkBundleContext.defaultContext.withDataset(model.transform(sampleData))
      val bf = BundleFile(new File(modelPath.toString))
      model.writeBundle.save(bf).get
      bf.close()
    }

    // Load the model
    {
      val bf = BundleFile(new File(modelPath.toString))
      bf.loadMleapBundle().get
      bf.close()
    }
  }
} 
Example 5
Source File: TestUtil.scala    From mleap    with Apache License 2.0
package ml.combust.mleap.springboot

import java.io.File
import java.net.URI
import java.nio.file.{Files, StandardCopyOption}

import ml.combust.mleap.core.types.{ScalarType, StructField, StructType}
import ml.combust.mleap.runtime.frame.{DefaultLeapFrame, Row}

object TestUtil {

  lazy val demoUri = getClass.getClassLoader.getResource("demo.zip").toURI

  lazy val validFrame = DefaultLeapFrame(
    StructType(Seq(StructField("demo:a", ScalarType.Double),
      StructField("demo:c", ScalarType.Double),
      StructField("demo:d", ScalarType.Double))).get,
    Seq(Row(44.5, 22.1, 98.2)))

  lazy val incompleteFrame = DefaultLeapFrame(
    StructType(Seq(StructField("demo:a", ScalarType.Double),
      StructField("demo:d", ScalarType.Double))).get,
    Seq(Row(44.5, 98.2)))

  lazy val failingBytes = Array[Byte](69, 121, 101, 45, 62, 118, 101, 114, 61, 101, 98)

  def getBundle(uri: URI, createTmp: Boolean): URI = {
    if (createTmp) {
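      // copy the bundle to a temporary file that is cleaned up on JVM exit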
      val tmpFile = Files.createTempFile("demo", ".bundle.zip")
      val file = new File(uri.getPath).toPath
      Files.copy(file, tmpFile, StandardCopyOption.REPLACE_EXISTING)
      tmpFile.toFile.deleteOnExit()
      tmpFile.toUri
    } else {
      uri
    }
  }
} 
Example 6
Source File: BackupHandler.scala    From eclair    with Apache License 2.0
package fr.acinq.eclair.db

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

import akka.actor.{Actor, ActorLogging, Props}
import akka.dispatch.{BoundedMessageQueueSemantics, RequiresMessageQueue}
import fr.acinq.eclair.channel.ChannelPersisted

import scala.sys.process.Process
import scala.util.{Failure, Success, Try}



class BackupHandler private(databases: Databases, backupFile: File, backupScript_opt: Option[String]) extends Actor with RequiresMessageQueue[BoundedMessageQueueSemantics] with ActorLogging {

  // we listen to ChannelPersisted events, which will trigger a backup
  context.system.eventStream.subscribe(self, classOf[ChannelPersisted])

  def receive = {
    case persisted: ChannelPersisted =>
      val start = System.currentTimeMillis()
      val tmpFile = new File(backupFile.getAbsolutePath.concat(".tmp"))
      databases.backup(tmpFile)
      // this will throw an exception if it fails, which is possible if the backup file is not on the same filesystem
      // as the temporary file
      Files.move(tmpFile.toPath, backupFile.toPath, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE)
      val end = System.currentTimeMillis()

      // publish a notification that we have updated our backup
      context.system.eventStream.publish(BackupCompleted)

      log.debug(s"database backup triggered by channelId=${persisted.channelId} took ${end - start}ms")

      backupScript_opt.foreach(backupScript => {
        Try {
          // run the script in the current thread and wait until it terminates
          Process(backupScript).!
        } match {
          case Success(exitCode) => log.debug(s"backup notify script $backupScript returned $exitCode")
          case Failure(cause) => log.warning(s"cannot start backup notify script $backupScript: $cause")
        }
      })
  }
}

sealed trait BackupEvent

// this notification is sent when we have completed our backup process (our backup file is ready to be used)
case object BackupCompleted extends BackupEvent

object BackupHandler {
  // using this method is the only way to create a BackupHandler actor
  // we make sure that it uses a custom bounded mailbox, and a custom pinned dispatcher (i.e our actor will have its own thread pool with 1 single thread)
  def props(databases: Databases, backupFile: File, backupScript_opt: Option[String]) = Props(new BackupHandler(databases, backupFile, backupScript_opt)).withMailbox("eclair.backup-mailbox").withDispatcher("eclair.backup-dispatcher")
} 
Example 7
Source File: ProcessActorTest.scala    From scastie    with Apache License 2.0
package com.olegych.scastie.util

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

import akka.actor.{Actor, ActorRef, ActorSystem}
import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe}
import com.olegych.scastie.api.ProcessOutput
import com.olegych.scastie.api.ProcessOutputType._
import com.olegych.scastie.util.ProcessActor._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuiteLike

import scala.concurrent.duration._

class ProcessActorTest() extends TestKit(ActorSystem("ProcessActorTest")) with ImplicitSender with AnyFunSuiteLike with BeforeAndAfterAll {

  test("do it") {
    (1 to 10).foreach { i =>
      println(s"--- Run $i ---")

      val command = new File("target", "echo.sh")
      Files.copy(getClass.getResourceAsStream("/echo.sh"), command.toPath, StandardCopyOption.REPLACE_EXISTING)
      command.setExecutable(true)

      val probe = TestProbe()

      val processActor = TestActorRef(new ProcessReceiver(command.getPath, probe.ref))

      processActor ! Input("abcd")
      processActor ! Input("1234")
      processActor ! Input("quit")

      def expected(msg0: String): Unit = {
        probe.expectMsgPF(4000.milliseconds) {
          case ProcessOutput(msg1, StdOut, _) if msg0.trim == msg1.trim => true
          case ProcessOutput(msg1, StdOut, _) =>
            println(s""""$msg1" != "$msg0"""")
            false
        }
      }

      expected("abcd")
      expected("1234")
    }
  }

  override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }
}

class ProcessReceiver(command: String, probe: ActorRef) extends Actor {
  private val props =
    ProcessActor.props(command = List("bash",  "-c", command.replace("\\", "/")), killOnExit = false)
  private val process = context.actorOf(props, name = "process-receiver")

  override def receive: Receive = {
    case output: ProcessOutput => probe ! output
    case input: Input          => process ! input
  }
} 
Example 8
Source File: FetchCache.scala    From coursier    with Apache License 2.0
package coursier.internal

import java.io.File
import java.math.BigInteger
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths, StandardCopyOption}
import java.security.MessageDigest

import coursier.cache.CacheLocks
import coursier.core.{Classifier, Dependency, Repository, Type}
import coursier.params.ResolutionParams
import coursier.paths.CachePath
import dataclass.data

@data class FetchCache(base: Path) {

  def dir(key: FetchCache.Key): Path =
    base.resolve(s"${key.sha1.take(2)}/${key.sha1.drop(2)}")
  def resultFile(key: FetchCache.Key): Path =
    dir(key).resolve("artifacts")
  def lockFile(key: FetchCache.Key): Path =
    dir(key).resolve("lock")

  def read(key: FetchCache.Key): Option[Seq[File]] = {
    val resultFile0 = resultFile(key)
    if (Files.isRegularFile(resultFile0)) {
      val artifacts = Predef.augmentString(new String(Files.readAllBytes(resultFile0), StandardCharsets.UTF_8))
        .lines
        .map(_.trim)
        .filter(_.nonEmpty)
        .map(Paths.get(_))
        .toVector

      if (artifacts.forall(Files.isRegularFile(_)))
        Some(artifacts.map(_.toFile))
      else
        None
    } else
      None
  }

  def write(key: FetchCache.Key, artifacts: Seq[File]): Boolean = {
    val resultFile0 = resultFile(key)
    val tmpFile = CachePath.temporaryFile(resultFile0.toFile).toPath

    def doWrite(): Unit = {
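      // write to a temporary file first, then move it into place atomically so readers never see a partial result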
      Files.write(tmpFile, artifacts.map(_.getAbsolutePath).mkString("\n").getBytes(StandardCharsets.UTF_8))
      Files.move(tmpFile, resultFile0, StandardCopyOption.ATOMIC_MOVE)
    }

    CacheLocks.withLockOr(
      base.toFile,
      resultFile0.toFile
    )(
      { doWrite(); true },
      Some(false)
    )
  }

}

object FetchCache {

  private[coursier] final case class Key(
    dependencies: Seq[Dependency],
    repositories: Seq[Repository],
    resolutionParams: ResolutionParams,

    // these 4 come from ResolutionParams, but are ordered here
    forceVersion: Seq[(coursier.core.Module, String)],
    properties: Seq[(String, String)],
    forcedProperties: Seq[(String, String)],
    profiles: Seq[String],

    cacheLocation: String,
    classifiers: Seq[Classifier],
    mainArtifacts: Option[Boolean],
    artifactTypesOpt: Option[Seq[Type]]
  ) {
    lazy val repr: String =
      productIterator.mkString("(", ", ", ")")
    lazy val sha1: String = {
      val md = MessageDigest.getInstance("SHA-1")
      val b = md.digest(repr.getBytes(StandardCharsets.UTF_8))
      val s = new BigInteger(1, b).toString(16)
      ("0" * (40 - s.length)) + s
    }
  }

} 
Example 9
Source File: TrainingHelper.scala    From spark-nlp    with Apache License 2.0
package com.johnsnowlabs.util

import java.io.File
import java.nio.file.{Files, Paths, StandardCopyOption}
import java.sql.Timestamp
import java.util.Date

import com.johnsnowlabs.nlp.pretrained.ResourceType.ResourceType
import com.johnsnowlabs.nlp.pretrained.{ResourceMetadata, ResourceType}
import org.apache.commons.io.FileUtils
import org.apache.spark.ml.util.MLWriter


object TrainingHelper {

  def saveModel(name: String,
                language: Option[String],
                libVersion: Option[Version],
                sparkVersion: Option[Version],
                modelWriter: MLWriter,
                folder: String,
                category: Option[ResourceType] = Some(ResourceType.NOT_DEFINED)
               ): Unit = {

    // 1. Get current timestamp
    val timestamp = new Timestamp(new Date().getTime)


    // 2. Save model to file
    val file = Paths.get(folder, timestamp.toString).toString.replaceAllLiterally("\\", "/")
    modelWriter.save(file)

    // 3. Zip file
    val tempzipFile = Paths.get(folder, timestamp + ".zip")
    ZipArchiveUtil.zip(file, tempzipFile.toString)

    // 4. Set checksum
    val checksum = FileHelper.generateChecksum(tempzipFile.toString)

    // 5. Create resource metadata
    val meta = new ResourceMetadata(name, language, libVersion, sparkVersion, true, timestamp, true, category = category, checksum)

    val zipfile = Paths.get(meta.fileName)

    // 6. Move the zip
    Files.move(tempzipFile, zipfile, StandardCopyOption.REPLACE_EXISTING)

    // 7. Remove original file
    try {
      FileUtils.deleteDirectory(new File(file))
    } catch {
      case _: java.io.IOException => //file lock may prevent deletion, ignore and continue
    }

    // 8. Add info about the resource to metadata.json
    val metadataFile = Paths.get(folder, "metadata.json").toString
    ResourceMetadata.addMetadataToFile(metadataFile, meta)
  }
} 
Example 10
Source File: FileUtilities.scala    From mmlspark    with MIT License
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.core.env

import java.io.File
import java.nio.file.{Files, StandardCopyOption}

import scala.io.{BufferedSource, Source}

object FileUtilities {

  def join(folders: String*): File = {
    folders.tail.foldLeft(new File(folders.head)) { case (f, s) => new File(f, s) }
  }

  def join(base: File, folders: String*): File = {
    folders.foldLeft(base) { case (f, s) => new File(f, s) }
  }

  // Convenience alias for java.nio.file.StandardOpenOption and its common values
  type StandardOpenOption = java.nio.file.StandardOpenOption
  object StandardOpenOption {
    import java.nio.file.{StandardOpenOption => S}
    val APPEND = S.APPEND
    val CREATE = S.CREATE
  }

  def allFiles(dir: File, pred: (File => Boolean) = null): Array[File] = {
    def loop(dir: File): Array[File] = {
      val (dirs, files) = dir.listFiles.sorted.partition(_.isDirectory)
      (if (pred == null) files else files.filter(pred)) ++ dirs.flatMap(loop)
    }
    loop(dir)
  }

  // readFile takes a file name or a File, and function to extract a value from
  // BufferedSource which defaults to _.mkString; performs the read, closes the
  // source, and returns the result
  def readFile[T](file: File, read: BufferedSource => T): T = {
    val i = Source.fromFile(file)
    try read(i) finally i.close
  }
  def readFile(file: File): String = readFile(file, _.mkString)

  def writeFile(file: File, stuff: Any, flags: StandardOpenOption*): Unit = {
    Files.write(file.toPath, stuff.toString.getBytes(), flags: _*)
    ()
  }

  def copyFile(from: File, toDir: File, overwrite: Boolean = false): Unit = {
    Files.copy(from.toPath, (new File(toDir, from.getName)).toPath,
               (if (overwrite) Seq(StandardCopyOption.REPLACE_EXISTING)
                else Seq()): _*)
    ()
  }

  // Perhaps this should move into a more specific place, not a generic file utils thing
  def zipFolder(dir: File, out: File): Unit = {
    import java.io.{BufferedInputStream, FileInputStream, FileOutputStream}
    import java.util.zip.{ZipEntry, ZipOutputStream}
    val bufferSize = 2 * 1024
    val data = new Array[Byte](bufferSize)
    val zip = new ZipOutputStream(new FileOutputStream(out))
    val prefixLen = dir.getParentFile.toString.length + 1
    allFiles(dir).foreach { file =>
      zip.putNextEntry(new ZipEntry(file.toString.substring(prefixLen).replace(java.io.File.separator, "/")))
      val in = new BufferedInputStream(new FileInputStream(file), bufferSize)
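      // prime-and-pump copy loop: the first write outputs 0 bytes, then data is copied until read returns -1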
      var b = 0
      while (b >= 0) { zip.write(data, 0, b); b = in.read(data, 0, bufferSize) }
      in.close()
      zip.closeEntry()
    }
    zip.close()
  }

} 
Example 11
Source File: ArtifactFSSaver.scala    From marvin-engine-executor    with Apache License 2.0
package org.marvin.artifact.manager

import java.nio.file.{Files, Path, Paths, StandardCopyOption}

import akka.Done
import akka.actor.{Actor, ActorLogging}
import org.marvin.artifact.manager.ArtifactSaver.{GetArtifact, SaveToLocal, SaveToRemote}
import org.marvin.model.EngineMetadata

class ArtifactFSSaver(metadata: EngineMetadata) extends Actor with ActorLogging {
  override def preStart() = {
    log.info(s"${this.getClass().getCanonicalName} actor initialized...")
  }

  def generatePaths(artifactName: String, protocol: String): Map[String, Path] = {
    Map(
      "localPath" -> Paths.get(s"${metadata.artifactsLocalPath}/${metadata.name}/$artifactName"),
      "remotePath" -> Paths.get((s"${metadata.artifactsRemotePath}/${metadata.name}/${metadata.version}/$artifactName/$protocol"))
    )
  }

  def copyFile(origin: Path, destination: Path): Unit = {
    if (!destination.getParent.toFile.exists()) destination.getParent.toFile.mkdirs()

    log.info(s"Copying files from ${origin} to ${destination}")

    Files.copy(origin, destination, StandardCopyOption.REPLACE_EXISTING)

    log.info(s"File ${destination} saved!")
  }

  def validatePath(path: Path): Boolean = {
    new java.io.File(path.toString).exists
  }

  override def receive: Receive = {
    case SaveToLocal(artifactName, protocol) =>
      log.info("Receive message and starting to working...")
      val uris = generatePaths(artifactName, protocol)

      // Check that the remote path exists before copying
      if (validatePath(uris("remotePath")))
        copyFile(uris("remotePath"), uris("localPath"))
      else
        log.error(s"Invalid protocol: ${protocol}, save process canceled!")

      sender ! Done

    case SaveToRemote(artifactName, protocol) =>
      log.info("Receive message and starting to working...")
      val uris = generatePaths(artifactName, protocol)

      // Check that the local path exists before copying
      if (validatePath(uris("localPath")))
        copyFile(uris("localPath"), uris("remotePath"))
      else
        log.error(s"Invalid protocol: ${protocol}, save process canceled!")

      sender ! Done

    case GetArtifact(artifactName, protocol) =>
      log.info("Receive message and starting to working...")
      val uris = generatePaths(artifactName, protocol)
      var response: String = ""

      // Check that the local path exists before reading
      if (validatePath(uris("localPath")))
        response = scala.io.Source.fromFile(uris("localPath").toString).getLines.mkString
      else
        log.error(s"Invalid protocol: ${protocol}, load process canceled!")

      sender ! response

    case _ =>
      log.warning("Received a bad format message...")
  }
} 
Example 12
Source File: JobUtils.scala    From fusion-data    with Apache License 2.0
package mass.job.util

import java.io.File
import java.nio.charset.Charset
import java.nio.file.{ Files, Path, StandardCopyOption }
import java.util.zip.ZipFile

import com.typesafe.scalalogging.StrictLogging
import helloscala.common.Configuration
import helloscala.common.util.{ DigestUtils, Utils }
import mass.common.util.FileUtils
import mass.core.job.JobConstants
import mass.job.JobSettings
import mass.message.job._
import mass.model.job.{ JobItem, JobTrigger }

import scala.concurrent.{ ExecutionContext, Future }


object JobUtils extends StrictLogging {
  case class JobZipInternal private (configs: Vector[JobCreateReq], entries: Vector[Path])

  def uploadJob(jobSettings: JobSettings, req: JobUploadJobReq)(implicit ec: ExecutionContext): Future[JobZip] =
    Future {
      val sha256 = DigestUtils.sha256HexFromPath(req.file)
      val dest = jobSettings.jobSavedDir.resolve(sha256.take(2)).resolve(sha256)

      val jobZipInternal = parseJobZip(req.file, req.charset, dest.resolve(JobConstants.DIST)) match {
        case Right(v) => v
        case Left(e)  => throw e
      }

      val zipPath = dest.resolve(req.fileName)
      Files.move(req.file, zipPath, StandardCopyOption.REPLACE_EXISTING)
      JobZip(zipPath, jobZipInternal.configs, jobZipInternal.entries)
    }

  @inline def parseJobZip(file: Path, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] =
    parseJobZip(file.toFile, charset, dest)

  def parseJobZip(file: File, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] = Utils.either {
    import scala.jdk.CollectionConverters._
    import scala.language.existentials

    val zip = new ZipFile(file, charset)
    try {
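      // span() splits the entry stream at the first entry whose name does not end with the conf suffix:
      // the leading entries are job configuration files, everything after them is payload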
      val (confEntries, fileEntries) = zip
        .entries()
        .asScala
        .filterNot(entry => entry.isDirectory)
        .span(entry => entry.getName.endsWith(JobConstants.ENDS_SUFFIX) && !entry.isDirectory)
      val configs =
        confEntries.map(confEntry =>
          parseJobConf(FileUtils.getString(zip.getInputStream(confEntry), charset, "\n")) match {
            case Right(config) => config
            case Left(e)       => throw e
          })

      val buf = Array.ofDim[Byte](1024)
      val entryPaths = fileEntries.map { entry =>
        val entryName = entry.getName
        val savePath = dest.resolve(entryName)
        if (!Files.isDirectory(savePath.getParent)) {
          Files.createDirectories(savePath.getParent)
        }
        FileUtils.write(zip.getInputStream(entry), Files.newOutputStream(savePath), buf) // write the zip entry to disk
        savePath
      }

      JobZipInternal(configs.toVector, entryPaths.toVector)
    } finally {
      if (zip ne null) zip.close()
    }
  }

  def parseJobConf(content: String): Either[Throwable, JobCreateReq] = Utils.either {
    val conf = Configuration.parseString(content)
    val jobItem = JobItem(conf.getConfiguration("item"))
    val jobTrigger = JobTrigger(conf.getConfiguration("trigger"))
    JobCreateReq(conf.get[Option[String]]("key"), jobItem, jobTrigger)
  }
}

case class JobZip(zipPath: Path, configs: Vector[JobCreateReq], entries: Vector[Path]) 
Example 13
Source File: DynamicDistBuilder.scala    From sbt-idea-plugin    with Apache License 2.0
package org.jetbrains.sbtidea.packaging.artifact

import java.nio.file.{Files, Path, Paths, StandardCopyOption}

import org.jetbrains.sbtidea.packaging.PackagingKeys.ExcludeFilter
import org.jetbrains.sbtidea.packaging._
import sbt.File
import sbt.Keys.TaskStreams

class DynamicDistBuilder(stream: TaskStreams, target: File, outputDir: File, private val hints: Seq[File]) extends DistBuilder(stream, target) {

  override def packageJar(to: Path, mappings: Mappings): Unit = {
    val isStatic = mappings.forall(_.metaData.static)
    if (isStatic)
      super.packageJar(to, mappings)
    else {
      val newOutputPath = outputDir.toPath.resolve("classes")
      if (!Files.exists(newOutputPath) || hints.isEmpty)
        packageNoHints(newOutputPath, mappings)
      else
        packageUsingHints(newOutputPath)
    }
  }

  private def packageUsingHints(newOutputPath: Path): Unit = {
    timed(s"Using ${hints.size} hints from previous compilation: $newOutputPath", {
      val key = "classes"
      val offset = key.length + 1
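      // each hint is an absolute path under a "classes" directory; strip everything up to and including "classes/"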
      for (hint <- hints) {
        val hintStr = hint.toString
        val relativisedStr = hintStr.substring(hintStr.indexOf(key) + offset)
        val newRelativePath = Paths.get(relativisedStr)
        val newAbsolutePath = newOutputPath.resolve(newRelativePath)
        if (newAbsolutePath.toFile.getParentFile == null || !newAbsolutePath.toFile.getParentFile.exists())
          Files.createDirectories(newAbsolutePath.getParent)
        Files.copy(hint.toPath, newAbsolutePath, StandardCopyOption.REPLACE_EXISTING)
      }
    })
  }

  private def packageNoHints(newOutputPath: Path, mappings: Mappings): Unit = {
    val packager = new DynamicPackager(newOutputPath, new NoOpClassShader, ExcludeFilter.AllPass, incrementalCache)
    timed(s"classes(${mappings.size}): $newOutputPath",
      packager.mergeIntoOne(mappings.map(_.from.toPath))
    )
  }

  override def patch(to: Path, mappings: Mappings): Unit = {
    streams.log.info(s"Patching has no effect when building dynamic artifact")
  }

}