com.amazonaws.auth.AWSCredentials Scala Examples

The following examples show how to use com.amazonaws.auth.AWSCredentials. Each example is drawn from an open-source project; the source file, project name, and license are noted above each one.
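Before the examples, a quick orientation: AWSCredentials is a small AWS SDK for Java (v1) interface exposing an access key and a secret key, and AWSCredentialsProvider supplies (and can refresh) such credentials. A minimal sketch of both, using placeholder key values:

import com.amazonaws.auth.{AWSCredentials, AWSCredentialsProvider, BasicAWSCredentials}

object CredentialsSketch {
  // BasicAWSCredentials is the simplest concrete implementation.
  val credentials: AWSCredentials = new BasicAWSCredentials("ACCESS_KEY_ID", "SECRET_KEY")

  // A provider that always returns the same credentials and has nothing to refresh.
  val provider: AWSCredentialsProvider = new AWSCredentialsProvider {
    override def getCredentials: AWSCredentials = credentials
    override def refresh(): Unit = ()
  }

  def main(args: Array[String]): Unit =
    println(provider.getCredentials.getAWSAccessKeyId) // prints ACCESS_KEY_ID
}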
Example 1
Source File: Credentials.scala    From spark-select   with Apache License 2.0
package io.minio.spark.select

import java.net.URI

// For BasicAWSCredentials
import com.amazonaws.auth.AWSCredentials
import com.amazonaws.auth.AWSCredentialsProvider
import com.amazonaws.auth.BasicAWSCredentials
import com.amazonaws.auth.BasicSessionCredentials
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain

import org.apache.hadoop.conf.Configuration

private[spark] object Credentials {
  private def staticCredentialsProvider(credentials: AWSCredentials): AWSCredentialsProvider = {
    new AWSCredentialsProvider {
      override def getCredentials: AWSCredentials = credentials
      override def refresh(): Unit = {}
    }
  }

  def load(location: Option[String], hadoopConfiguration: Configuration): AWSCredentialsProvider = {
    val uri = new URI(location.getOrElse(""))
    val uriScheme = uri.getScheme

    uriScheme match {
      case "s3" | "s3a" =>
        // This matches what S3A does, with one exception: we don't
        // support anonymous credentials. First, try to parse from URI:
        Option(uri.getUserInfo).flatMap { userInfo =>
          if (userInfo.contains(":")) {
            val Array(accessKey, secretKey) = userInfo.split(":")
            Some(staticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey)))
          } else {
            None
          }
        }.orElse {
          val accessKey = hadoopConfiguration.get("fs.s3a.access.key", null)
          val secretKey = hadoopConfiguration.get("fs.s3a.secret.key", null)
          val sessionToken = hadoopConfiguration.get("fs.s3a.session.token", null)
          if (accessKey != null && secretKey != null) {
            if (sessionToken != null) {
              Some(staticCredentialsProvider(new BasicSessionCredentials(accessKey, secretKey, sessionToken)))
            } else {
              Some(staticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey)))
            }
          } else {
            None
          }
        }.getOrElse {
          // Finally, fall back on the instance profile provider
          new DefaultAWSCredentialsProviderChain()
        }
      case other =>
        throw new IllegalArgumentException(s"Unrecognized scheme $other; expected s3 or s3a")
    }
  }
} 
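A hypothetical call site for Credentials.load (the bucket URI and keys are placeholders, and since Credentials is private[spark], the caller is assumed to live under the same spark package):

import org.apache.hadoop.conf.Configuration

val hadoopConf = new Configuration()
hadoopConf.set("fs.s3a.access.key", "ACCESS_KEY_ID")
hadoopConf.set("fs.s3a.secret.key", "SECRET_KEY")

// No user info in the URI, so load falls through to the Hadoop keys set above.
val provider = Credentials.load(Some("s3a://my-bucket/path"), hadoopConf)
println(provider.getCredentials.getAWSAccessKeyId)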
Example 2
Source File: RokkuS3ProxyVirtualHostedItTest.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy

import akka.http.scaladsl.model.Uri.Authority
import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.{AWSCredentials, AWSStaticCredentialsProvider, BasicSessionCredentials}
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
import com.amazonaws.services.s3.{AmazonS3, AmazonS3ClientBuilder}

class RokkuS3ProxyVirtualHostedItTest extends RokkuS3ProxyItTest {

  override def getAmazonS3(authority: Authority,
                           credentials: AWSCredentials = new BasicSessionCredentials("accesskey", "secretkey", "token")
                          ): AmazonS3 = {
    val cliConf = new ClientConfiguration()
    cliConf.setMaxErrorRetry(1)

    AmazonS3ClientBuilder
      .standard()
      .withClientConfiguration(cliConf)
      .withCredentials(new AWSStaticCredentialsProvider(credentials))
      .withPathStyleAccessEnabled(false)
      .withEndpointConfiguration(new EndpointConfiguration(s"http://s3.localhost:${authority.port}", awsRegion))
      .build()
  }
} 
Example 3
Source File: S3SdkHelpers.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.testkit.awssdk

import java.io.File

import akka.http.scaladsl.model.Uri.Authority
import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.{AWSCredentials, AWSStaticCredentialsProvider, BasicSessionCredentials}
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
import com.amazonaws.services.s3.transfer.TransferManagerBuilder
import com.amazonaws.services.s3.transfer.model.UploadResult
import com.amazonaws.services.s3.{AmazonS3, AmazonS3ClientBuilder}
import com.typesafe.config.ConfigFactory

import scala.collection.JavaConverters._


trait S3SdkHelpers {
  val awsRegion = ConfigFactory.load().getString("rokku.storage.s3.region")

  def getAmazonS3(authority: Authority,
                  credentials: AWSCredentials = new BasicSessionCredentials("accesskey", "secretkey", "token")
                 ): AmazonS3 = {
    val cliConf = new ClientConfiguration()
    cliConf.setMaxErrorRetry(1)

    AmazonS3ClientBuilder
      .standard()
      .withClientConfiguration(cliConf)
      .withCredentials(new AWSStaticCredentialsProvider(credentials))
      .withPathStyleAccessEnabled(true)
      .withEndpointConfiguration(new EndpointConfiguration(s"http://s3.localhost:${authority.port}", awsRegion))
      .build()
  }

  def getKeysInBucket(sdk: AmazonS3, bucket: String): List[String] =
    sdk
      .listObjectsV2(bucket)
      .getObjectSummaries
      .asScala.toList
      .map(_.getKey)

  def doMultiPartUpload(sdk: AmazonS3, bucket: String, file: String, key: String): UploadResult = {
    val upload = TransferManagerBuilder
      .standard()
      .withS3Client(sdk)
      .build()
      .upload(bucket, key, new File(file))

    upload.waitForUploadResult()
  }
} 
Example 4
Source File: S3Sandbox.scala    From redshift-fake-driver   with Apache License 2.0
package jp.ne.opt.redshiftfake

import java.net.URI

import com.amazonaws.auth.{AWSCredentials, BasicAWSCredentials}
import com.amazonaws.regions.RegionUtils
import com.amazonaws.services.s3.AmazonS3Client
import org.gaul.s3proxy.{AuthenticationType, S3Proxy}
import org.jclouds.ContextBuilder
import org.jclouds.blobstore.BlobStoreContext
import org.scalatest.{BeforeAndAfterAll, Suite}

trait S3Sandbox extends BeforeAndAfterAll { this: Suite =>

  val dummyCredentials: Credentials.WithKey
  val s3Endpoint: String

  var s3Proxy: S3Proxy = _

  override def beforeAll(): Unit = {
    val blobContext: BlobStoreContext = ContextBuilder
      .newBuilder("transient")
      .build(classOf[BlobStoreContext])

    s3Proxy = S3Proxy.builder
      .blobStore(blobContext.getBlobStore)
      .awsAuthentication(AuthenticationType.AWS_V4, dummyCredentials.accessKeyId, dummyCredentials.secretAccessKey)
      .endpoint(URI.create(s3Endpoint))
      .build
    s3Proxy.start()
  }

  override def afterAll(): Unit = {
    s3Proxy.stop()
  }

  def createS3Client(s3Region: String): AmazonS3Client = {
    val credentials: AWSCredentials = new BasicAWSCredentials(dummyCredentials.accessKeyId, dummyCredentials.secretAccessKey)
    val client = new AmazonS3Client(credentials)
    client.setRegion(RegionUtils.getRegion(s3Region))
    client.setEndpoint(s3Endpoint)

    client
  }
} 
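A hypothetical concrete suite mixing in S3Sandbox; Credentials.WithKey is assumed to be the key-pair type from this project with the accessKeyId and secretAccessKey fields used above, and the endpoint port is arbitrary:

import org.scalatest.FunSuite

class S3SandboxUsageSpec extends FunSuite with S3Sandbox {
  override val dummyCredentials: Credentials.WithKey = Credentials.WithKey("dummyAccess", "dummySecret")
  override val s3Endpoint: String = "http://127.0.0.1:8001"

  test("round-trips an object through the in-memory S3") {
    val client = createS3Client("us-east-1")
    client.createBucket("sandbox-bucket")
    client.putObject("sandbox-bucket", "greeting", "hello")
    assert(client.getObjectAsString("sandbox-bucket", "greeting") == "hello")
  }
}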
Example 5
Source File: KibanaForwarder.scala    From shield   with MIT License
package shield.actors.listeners

import akka.actor.{Actor, ActorLogging}
import com.amazonaws.auth.{AWSCredentials, DefaultAWSCredentialsProviderChain}
import com.typesafe.config.Config
import shield.actors.RestartLogging
import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTimeZone, DateTime}
import shield.aws.AWSSigningConfig
import shield.metrics.Instrumented
import spray.client.pipelining._
import spray.http.HttpResponse
import shield.aws.AWSImplicits._
import spray.json.DefaultJsonProtocol._
import spray.json._

// todo: ensure useful mapping on the index
class KibanaForwarder(id: String, host: String, indexPrefix: String, ttype: String, maxOutstanding: Int, signingParams: AWSSigningConfig) extends Actor with ActorLogging with RestartLogging with Instrumented {
  implicit val ctx = context.dispatcher

  // todo: timeout?
  val awsSigningConfig = signingParams
  val pipeline = sendReceive
  val dayFormat = DateTimeFormat.forPattern("yyyy.MM.dd")
  val outstandingCounter = metrics.counter("outstandingPosts", id)
  val droppedMeter = metrics.meter("droppedAccessLogs", id)
  val postTimer = timing("postToKibana", id)

  def receive = {
    case LogsFlushed =>
      outstandingCounter -= 1

    case AccessLogs(buffer) =>
      if (buffer.nonEmpty) {
        if (outstandingCounter.count >= maxOutstanding) {
          droppedMeter.mark(buffer.length)
        } else postTimer {
          outstandingCounter += 1

          val date = dayFormat.print(DateTime.now(DateTimeZone.UTC))
          // todo: CompactPrint is 1% cpu under load tests.  Faster serialization library?
          val orderedCommands = buffer.flatMap { doc =>
            List(
              JsObject(
                "index" -> JsObject(
                  "_index" -> JsString(s"$indexPrefix-$date"),
                  "_type" -> JsString(ttype)
                )
              ).toJson.compactPrint,
              doc.toJson.compactPrint
            )
          }
          val req = Post(s"$host/_bulk", orderedCommands.mkString("\n") + "\n").withAWSSigning(awsSigningConfig)
          pipeline(req) andThen LogCollector.handleResults(self, droppedMeter, log, buffer.length)
        }
      }
  }
} 
Example 6
Source File: AuthUtilSpec.scala    From shield   with MIT License
package shield.implicits

import com.amazonaws.auth.{AWSCredentials, AWSCredentialsProvider, AWSCredentialsProviderChain}
import org.specs2.mutable.Specification
import shield.aws.{AWSSigningConfig, AuthUtil}
import spray.http._


class AuthUtilSpec extends Specification {
  //Set consistent times that will produce consistent results for the tests
  val d1 = "20160315T141234Z"
  val d2 = "20160315"

  //Create a new config, these values are typically found in application.conf
  val config = new AWSSigningConfig("example-elasticsearch-host", "us-west-1", "es", true, new AWSCredentialsProviderChain(new StaticCredentialProvider()))

  "AuthUtil" should {

    "Use SHA256" in {
      println(AuthUtil.hashAsString("Hello world!"))
      AuthUtil.hashAsString("Hello world!") must be equalTo "c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a"
      AuthUtil.hashAsString("123$%^abcDEF") must be equalTo "3b43642576e2c2cf349f34ff7f10e700bf485e6982647a50e361e883a5aaafa2"
      AuthUtil.hashAsString("  _***~`  ") must be equalTo "0597e54e8278a8673f09842d03e4af3a2688d1a15a55a640968382a5311416b4"
    }

    "Create canonical request hash" in {
      val request = new HttpRequest(HttpMethods.GET, Uri("https://example-elasticsearch-host.com:80"), List(), HttpEntity(HttpData("Sample data for a sample request ~*)@#$) @#(((")))

      println(AuthUtil.createCanonicalHash(request, "example-elasticsearch-host"))
      AuthUtil.createCanonicalHash(request, "example-elasticsearch-host") must be equalTo "05ef99e67afa47f06ed12084460baa4fca0bfbf92faebabed00fa78796028c5d"
    }

    "Create string to sign from a given canonical request" in {
      val canonicalRequestHash = "05ef99e67afa47f06ed12084460baa4fca0bfbf92faebabed00fa78796028c5d"

      AuthUtil.createStringToSign(d1, d2, config.region, config.service, canonicalRequestHash) must be equalTo "AWS4-HMAC-SHA256\n20160315\n20160315T141234Z/us-west-1/es/aws4_request\n05ef99e67afa47f06ed12084460baa4fca0bfbf92faebabed00fa78796028c5d"
    }

    "Create a signature" in {
      val stringToSign = "AWS4-HMAC-SHA256\n20160315\n20160315T141234Z/us-west-1/es/aws4_request\n05ef99e67afa47f06ed12084460baa4fca0bfbf92faebabed00fa78796028c5d"
      val signature = AuthUtil.hmacSHA256AsString(stringToSign, AuthUtil.createSignatureKey(config.getSecretKey(), d1, config.region, config.service))

      signature must be equalTo "68e811337b35141320236cf585a7fefad71d8948e4d1e9d5eb3583474d31eb6a"
    }
  }
}

//Create a static credential provider so that the access key and secret key stay the same for the purposes of testing
class StaticCredentialProvider extends AWSCredentialsProvider {
  override def refresh(): Unit = { }

  override def getCredentials: AWSCredentials = new AWSCredentials {
    override def getAWSAccessKeyId: String = "AccessKeyId"

    override def getAWSSecretKey: String = "SuperSecretKey"
  }
} 
Example 7
Source File: S3Utils.scala    From elastiknn   with Apache License 2.0
package com.klibisz.elastiknn.benchmarks

import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.{AWSCredentials, AWSStaticCredentialsProvider}
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
import com.amazonaws.services.s3.{AmazonS3, AmazonS3ClientBuilder}

object S3Utils {

  
  def minioClient(): AmazonS3 = {
    val endpointConfig = new EndpointConfiguration("http://localhost:9000", "us-east-1")
    val clientConfig = new ClientConfiguration()
    clientConfig.setSignerOverride("AWSS3V4SignerType")
    AmazonS3ClientBuilder.standard
      .withPathStyleAccessEnabled(true)
      .withEndpointConfiguration(endpointConfig)
      .withClientConfiguration(clientConfig)
      .withCredentials(new AWSStaticCredentialsProvider(new AWSCredentials {
        override def getAWSAccessKeyId: String = "minioadmin"
        override def getAWSSecretKey: String = "minioadmin"
      }))
      .build()
  }

  def defaultClient(): AmazonS3 = AmazonS3ClientBuilder.defaultClient()

} 
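A usage sketch for the MinIO client above, assuming a MinIO server is listening on localhost:9000 with the default minioadmin credentials (bucket and key names are placeholders):

val s3 = S3Utils.minioClient()
s3.createBucket("benchmark-data")
s3.putObject("benchmark-data", "hello.txt", "hello from a local benchmark run")
println(s3.getObjectAsString("benchmark-data", "hello.txt"))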
Example 8
Source File: S3ConfigManager.scala    From teamcity-s3-plugin   with Apache License 2.0
package com.gu.teamcity

import java.io.{File, PrintWriter}

import com.amazonaws.auth.{BasicAWSCredentials, AWSCredentialsProvider, AWSCredentials}
import jetbrains.buildServer.serverSide.ServerPaths
import org.json4s._
import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization
import org.json4s.native.Serialization._

case class S3Config(
  artifactBucket: Option[String], buildManifestBucket: Option[String], tagManifestBucket: Option[String],
  awsAccessKey: Option[String], awsSecretKey: Option[String]
)

class S3ConfigManager(paths: ServerPaths) extends AWSCredentialsProvider {
  implicit val formats = Serialization.formats(NoTypeHints)

  val configFile = new File(s"${paths.getConfigDir}/s3.json")

  private[teamcity] var config: Option[S3Config] = {
    if (configFile.exists()) {
      parse(configFile).extractOpt[S3Config]
    } else None
  }

  def artifactBucket: Option[String] = config.flatMap(_.artifactBucket)
  def buildManifestBucket: Option[String] = config.flatMap(_.buildManifestBucket)
  def tagManifestBucket: Option[String] = config.flatMap(_.tagManifestBucket)

  private[teamcity] def update(config: S3Config): Unit = {
    this.config = Some(if (config.awsSecretKey.isEmpty && config.awsAccessKey == this.config.flatMap(_.awsAccessKey)) {
      config.copy(awsSecretKey = this.config.flatMap(_.awsSecretKey))
    } else config)
  }

  def updateAndPersist(newConfig: S3Config): Unit = {
    synchronized {
      update(newConfig)
      val out = new PrintWriter(configFile, "UTF-8")
      try { writePretty(config, out) }
      finally { out.close }
    }
  }

  def details: Map[String, Option[String]] = Map(
    "artifactBucket" -> artifactBucket,
    "buildManifestBucket" -> buildManifestBucket,
    "tagManifestBucket" -> tagManifestBucket,
    "accessKey" -> config.flatMap(_.awsAccessKey)
  )

  override def getCredentials: AWSCredentials = (for {
    c <- config
    accessKey <- c.awsAccessKey
    secretKey <- c.awsSecretKey
  } yield new BasicAWSCredentials(accessKey, secretKey)).getOrElse(null) // Yes, this is sad

  override def refresh(): Unit = ()
}

object S3ConfigManager {
  val bucketElement = "bucket"
  val s3Element = "S3"
} 
Example 9
Source File: RedshiftReaderM.scala    From SqlShift   with MIT License
package com.databricks.spark.redshift

import com.amazonaws.auth.AWSCredentials
import com.amazonaws.services.s3.AmazonS3Client
import org.apache.spark.SparkContext
import org.apache.spark.sql.sources.BaseRelation
import org.apache.spark.sql.{DataFrame, SQLContext}

object RedshiftReaderM {

    val endpoint = "s3.ap-south-1.amazonaws.com"

    def getS3Client(credentials: AWSCredentials): AmazonS3Client = {
        val client = new AmazonS3Client(credentials)
        client.setEndpoint(endpoint)
        client
    }

    def getDataFrameForConfig(configs: Map[String, String], sparkContext: SparkContext, sqlContext: SQLContext): DataFrame = {
        val source: DefaultSource = new DefaultSource(new JDBCWrapper(), getS3Client)
        val br: BaseRelation = source.createRelation(sqlContext, configs)
        sqlContext.baseRelationToDataFrame(br)
    }
} 
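A hypothetical invocation of getDataFrameForConfig, assuming a SparkSession named spark is in scope; the option keys (url, dbtable, tempdir) follow spark-redshift data source conventions, and all values are placeholders:

val configs = Map(
  "url" -> "jdbc:redshift://redshift-host:5439/dev?user=user&password=pass",
  "dbtable" -> "my_table",
  "tempdir" -> "s3a://my-bucket/redshift-tmp"
)
val df = RedshiftReaderM.getDataFrameForConfig(configs, spark.sparkContext, spark.sqlContext)
df.show()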
Example 10
Source File: AwsConfig.scala    From cave   with MIT License
package com.cave.metrics.data

import com.amazonaws.auth.{AWSCredentials, AWSCredentialsProvider, BasicAWSCredentials, ClasspathPropertiesFileCredentialsProvider}
import com.typesafe.config.Config

trait AmazonWebServiceConfig {
  def endpoint: String
  def service: String
  def region: String
}

class AwsConfig(config: Config) {

  private lazy val awsConfig = config.resolve.getConfig("aws")
  private lazy val rdsConfig = awsConfig.resolve.getConfig("rds")

  private lazy val awsCredentialsConfig = awsConfig.getConfig("credentials")
  lazy val awsCredentialsProvider = createAwsCredentialsProvider(
    awsCredentialsConfig.getString("access-key"),
    awsCredentialsConfig.getString("secret-key"))

  println("AWS Access Key: " + awsCredentialsProvider.getCredentials.getAWSAccessKeyId)

  private lazy val kinesisConfig = awsConfig.getConfig("kinesis")
  lazy val awsKinesisConfig = makeAmazonWebServiceConfig(kinesisConfig)

  private lazy val awsKinesisStreamConfig = kinesisConfig.getConfig("stream")
  lazy val rawStreamName = awsKinesisStreamConfig.getString("raw")
  lazy val processedStreamName = awsKinesisStreamConfig.getString("processed")

  private lazy val sqsConfig = awsConfig.getConfig("sqs")
  lazy val awsSQSConfig = makeAmazonWebServiceConfig(sqsConfig)

  lazy val longPollTimeInSeconds = sqsConfig.getInt("longPollTimeInSeconds")

  private lazy val awsSqsQueuesConfig = sqsConfig.getConfig("queues")
  lazy val configurationChangesQueueName = awsSqsQueuesConfig.getString("config-changes")
  lazy val alarmScheduleQueueName = awsSqsQueuesConfig.getString("alarm-schedule")

  private lazy val autoScalingConfig = awsConfig.getConfig("autoscaling")
  lazy val awsAutoScalingConfig = makeAmazonWebServiceConfig(autoScalingConfig)

  private lazy val ec2Config = awsConfig.getConfig("ec2")
  lazy val awsEC2Config = makeAmazonWebServiceConfig(ec2Config)

  private lazy val snsConfig = awsConfig.getConfig("sns")
  lazy val awsSNSConfig = makeAmazonWebServiceConfig(snsConfig)

  private lazy val awsSnsTopicsConfig = snsConfig.getConfig("topics")
  lazy val configurationChangesTopicName = awsSnsTopicsConfig.getString("config-changes")

  lazy val rdsJdbcDatabaseClass = rdsConfig.getString("database-class")
  lazy val rdsJdbcDatabaseUrl = rdsConfig.getString("database-jdbc")
  lazy val rdsJdbcDatabaseServer = rdsConfig.getString("database-server")
  lazy val rdsJdbcDatabasePort = rdsConfig.getString("database-port")
  lazy val rdsJdbcDatabaseName = rdsConfig.getString("database-name")
  lazy val rdsJdbcDatabaseUser = rdsConfig.getString("database-user")
  lazy val rdsJdbcDatabasePassword = rdsConfig.getString("database-password")
  lazy val rdsJdbcDatabasePoolSize = rdsConfig.getInt("pool-size")
  lazy val rdsJdbcConnectionTimeout = rdsConfig.getInt("connection-timeout")

  lazy val leadershipTermTimeoutSeconds = config.getInt("leadershipTermTimeoutSeconds")
  lazy val leadershipTermLengthSeconds = config.getInt("leadershipTermLengthSeconds")

  
  private[this] def makeAmazonWebServiceConfig(config: Config) = new AmazonWebServiceConfig {
      override def endpoint: String = config.getString("endpoint")
      override def service: String = config.getString("service")
      override def region: String = config.getString("region")
    }
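  // createAwsCredentialsProvider is referenced above but missing from this
  // snippet. A plausible reconstruction based on the imports (an assumption,
  // not the original code): wrap explicit keys when both are present,
  // otherwise fall back to credentials from a classpath properties file.
  private def createAwsCredentialsProvider(accessKey: String, secretKey: String): AWSCredentialsProvider =
    if (accessKey != null && accessKey.nonEmpty && secretKey != null && secretKey.nonEmpty)
      new AWSCredentialsProvider {
        override def getCredentials: AWSCredentials = new BasicAWSCredentials(accessKey, secretKey)
        override def refresh(): Unit = ()
      }
    else new ClasspathPropertiesFileCredentialsProvider()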
} 
Example 11
Source File: S3Brain.scala    From sumobot   with Apache License 2.0
package com.sumologic.sumobot.brain

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.util.Properties

import akka.actor.{Actor, Props}
import com.amazonaws.auth.{AWSCredentials, AWSStaticCredentialsProvider}
import com.amazonaws.services.s3.AmazonS3ClientBuilder
import com.amazonaws.services.s3.model.ObjectMetadata
import com.sumologic.sumobot.brain.Brain._

import scala.collection.JavaConverters._
import scala.collection.immutable

object S3Brain {
  def props(credentials: AWSCredentials,
            bucket: String,
            s3Key: String): Props = Props(classOf[S3Brain], credentials, bucket, s3Key)
}

class S3Brain(credentials: AWSCredentials,
              bucket: String,
              s3Key: String) extends Actor {

  private val s3Client = AmazonS3ClientBuilder.standard()
    .withCredentials(new AWSStaticCredentialsProvider(credentials)).build

  private var brainContents: Map[String, String] = loadFromS3()

  override def receive: Receive = {
    case Store(key, value) =>
      brainContents += (key -> value)
      saveToS3(brainContents)

    case Remove(key) =>
      brainContents -= key
      saveToS3(brainContents)

    case Retrieve(key) =>
      brainContents.get(key) match {
        case Some(value) => sender() ! ValueRetrieved(key, value)
        case None => sender() ! ValueMissing(key)
      }

    case ListValues(prefix) =>
      sender() ! ValueMap(brainContents.filter(_._1.startsWith(prefix)))
  }

  private def loadFromS3(): Map[String, String] = {
    if (s3Client.doesBucketExistV2(bucket)) {
      val props = new Properties()
      props.load(s3Client.getObject(bucket, s3Key).getObjectContent)
      immutable.Map(props.asScala.toSeq: _*)
    } else {
      Map.empty
    }
  }

  private def saveToS3(contents: Map[String, String]): Unit = {
    if (!s3Client.doesBucketExistV2(bucket)) {
      s3Client.createBucket(bucket)
    }

    val props = new Properties()
    props.putAll(contents.asJava)
    val out = new ByteArrayOutputStream()
    props.store(out, "")
    out.flush()
    out.close()
    val in = new ByteArrayInputStream(out.toByteArray)
    s3Client.putObject(bucket, s3Key, in, new ObjectMetadata())
  }
} 
Example 12
Source File: AWSAccounts.scala    From sumobot   with Apache License 2.0
package com.sumologic.sumobot.core.aws

import com.amazonaws.auth.{AWSCredentials, BasicAWSCredentials}
import com.sumologic.sumobot.core.config.ListOfConfigs
import com.typesafe.config.Config

object AWSAccounts {
  def load(config: Config): Map[String, AWSCredentials] = {
    ListOfConfigs.parse(config, "aws") {
      (name, accountConfig) =>
        val key = accountConfig.getString("key.id")
        val secret = accountConfig.getString("key.secret")
        new BasicAWSCredentials(key, secret)
    }
  }
} 
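A usage sketch with an inline configuration; the exact nesting expected by ListOfConfigs.parse is an assumption inferred from the key.id / key.secret lookups above:

import com.typesafe.config.ConfigFactory

val config = ConfigFactory.parseString(
  """
    |aws {
    |  prod {
    |    key.id = "ACCESS_KEY_ID"
    |    key.secret = "SECRET_KEY"
    |  }
    |}
  """.stripMargin)

// Expected (under the assumed shape): Map("prod" -> BasicAWSCredentials(...))
val accounts = AWSAccounts.load(config)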
Example 13
Source File: AWSSupport.scala    From sumobot   with Apache License 2.0
package com.sumologic.sumobot.plugins.awssupport

import akka.actor.ActorLogging
import com.amazonaws.auth.{AWSCredentials, AWSStaticCredentialsProvider}
import com.amazonaws.services.support.AWSSupportClientBuilder
import com.amazonaws.services.support.model.{CaseDetails, DescribeCasesRequest}
import com.sumologic.sumobot.core.aws.AWSAccounts
import com.sumologic.sumobot.core.model.IncomingMessage
import com.sumologic.sumobot.plugins.BotPlugin

import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success, Try}

class AWSSupport
  extends BotPlugin
    with ActorLogging {

  case class CaseInAccount(account: String, caseDetails: CaseDetails)

  private val credentials: Map[String, AWSCredentials] =
    AWSAccounts.load(context.system.settings.config)

  private val clients = credentials.map { case (id, creds) =>
    id ->
      AWSSupportClientBuilder.standard().withCredentials(new AWSStaticCredentialsProvider(creds)).build()
  }

  override protected def help: String =
    s"""
       |I can tell you about AWS support tickets.
       |
       |list aws cases - List all AWS support tickets.
       |show aws case <case> - I'll show you more details about that case.
     """.stripMargin

  private val CaseDetails = matchText("show aws case (\\d+).*")

  private val ListCases = matchText("list aws cases")

  override protected def receiveIncomingMessage: ReceiveIncomingMessage = {

    case message@IncomingMessage(ListCases(), _, _, _, _, _, _) =>
      message.respondInFuture {
        msg =>
          val caseList = getAllCases.map(summary(_) + "\n").mkString("\n")
          msg.message(caseList)
      }

    case message@IncomingMessage(CaseDetails(caseId), _, _, _, _, _, _) =>
      message.respondInFuture {
        msg =>
          log.info(s"Looking for case $caseId")

          Try(getAllCases) match {
            case Success(cases) =>
              cases.find(_.caseDetails.getDisplayId == caseId) match {
                case None =>
                  msg.response("Not a known support case.")
                case Some(cse) =>
                  msg.message(details(cse))
              }
            case Failure(e) if e.getMessage.contains("Invalid case ID:") =>
              msg.response(s"Invalid case ID: $caseId")
          }
      }
  }

  private def getAllCases: Seq[CaseInAccount] = {
    clients.toSeq.par.flatMap {
      tpl =>
        val client = tpl._2
        val unresolved = client.describeCases(new DescribeCasesRequest()).getCases.asScala.toList
        val resolved = client.describeCases(new DescribeCasesRequest().withIncludeResolvedCases(true)).getCases.asScala.toList
        (unresolved ++ resolved).map(CaseInAccount(tpl._1, _))
    }.seq
  }

  private def summary(cia: CaseInAccount): String =
    s"*# ${cia.caseDetails.getDisplayId}:* ${cia.caseDetails.getSubject}\n" +
      s" - account: ${cia.account}, submitted by: ${cia.caseDetails.getSubmittedBy}, status: ${cia.caseDetails.getStatus}"

  private def details(cia: CaseInAccount): String = {
    val latest = cia.caseDetails.getRecentCommunications.getCommunications.asScala.head
    summary(cia) + "\n\n" +
      s"""
         |_${latest.getSubmittedBy} at ${latest.getTimeCreated}_
         |${latest.getBody}
    """.stripMargin
  }
} 
Example 14
Source File: S3BrainTest.scala    From sumobot   with Apache License 2.0
package com.sumologic.sumobot.brain

import akka.actor.ActorSystem
import akka.pattern.ask
import akka.testkit.TestKit
import akka.util.Timeout
import com.amazonaws.auth.{AWSCredentials, AWSStaticCredentialsProvider}
import com.amazonaws.services.s3.AmazonS3ClientBuilder
import com.sumologic.sumobot.brain.Brain.ValueRetrieved
import com.sumologic.sumobot.core.aws.AWSAccounts
import com.sumologic.sumobot.test.annotated.SumoBotTestKit
import org.scalatest.{BeforeAndAfterAll, Matchers}

import scala.collection.JavaConverters._
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Random

class S3BrainTest
    extends SumoBotTestKit(ActorSystem("S3SingleObjectBrainTest"))
    with BeforeAndAfterAll
    with Matchers {

  lazy val credsOption = AWSAccounts.load(system.settings.config).values.headOption

  val bucketPrefix = "sumobot-s3-brain"

  // The tests here only run if there are valid AWS credentials in the configuration. Otherwise,
  // they're skipped.
  credsOption foreach {
    creds =>
      cleanupBuckets(creds)

      val bucket = bucketPrefix + randomString(5)

      "S3 brain" should {
        "persist the contents across reloads" in {
          implicit val timeout = Timeout(5.seconds)
          val s3Key = randomString(16)
          val firstBrain = system.actorOf(S3Brain.props(creds, bucket, s3Key))
          firstBrain ! Brain.Store("hello", "world")

          // Just wait for the next message to return.
          val firstRetrieval = firstBrain ? Brain.Retrieve("hello")
          val firstResult = Await.result(firstRetrieval, 5.seconds)
          firstResult match {
            case ValueRetrieved(k, v) =>
              k should be("hello")
              v should be("world")
            case wrongResult => fail(s"Did not get what we expected: $wrongResult")
          }

          // Since we wrote to S3, the 2nd brain should now have the value.
          val secondBrain = system.actorOf(S3Brain.props(creds, bucket, s3Key))
          val secondRetrieval = secondBrain ? Brain.Retrieve("hello")
          val secondResult = Await.result(secondRetrieval, 5.seconds)
          secondResult match {
            case ValueRetrieved(k, v) =>
              k should be("hello")
              v should be("world")
            case wrongResult => fail(s"Did not get what we expected: $wrongResult")
          }
        }
      }
  }

  private def randomString(length: Int): String = {
    val alphabet = ('a' to 'z').mkString + ('0' to '9').mkString
    (1 to length).
        map(_ => Random.nextInt(alphabet.length)).
        map(alphabet.charAt).mkString
  }

  override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
    credsOption.foreach(cleanupBuckets)
  }

  def cleanupBuckets(creds: AWSCredentials): Unit = {
    val s3 = AmazonS3ClientBuilder.standard().withCredentials(new AWSStaticCredentialsProvider(creds)).build()
    s3.listBuckets().asScala.filter(_.getName.startsWith(bucketPrefix)).foreach {
      bucket =>
        println(s"Deleting S3 bucket ${bucket.getName}")
        val objects = s3.listObjects(bucket.getName).getObjectSummaries.asScala.map(_.getKey)
        objects.foreach {
          obj =>
            s3.deleteObject(bucket.getName, obj)
        }
        s3.deleteBucket(bucket.getName)
    }
  }
}