Java Code Examples for com.amazonaws.util.StringInputStream

The following examples show how to use com.amazonaws.util.StringInputStream. They are extracted from open source projects; the originating project, source file, and license are noted above each example where available.
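Before the project examples, here is a minimal stand-alone sketch of what the class does: StringInputStream wraps a String as an InputStream backed by the string's UTF-8 bytes, and the original string remains reachable via getString(). The class name StringInputStreamDemo below is illustrative (not taken from any of the projects), and the sketch assumes only the AWS SDK for Java v1 core library (aws-java-sdk-core) on the classpath.

import java.io.IOException;

import com.amazonaws.util.IOUtils;
import com.amazonaws.util.StringInputStream;

public class StringInputStreamDemo {

    public static void main(String[] args) throws IOException {
        // Wrap a plain String as an InputStream of its UTF-8 bytes.
        StringInputStream in = new StringInputStream("hello world");

        // The original string stays available without consuming the stream.
        System.out.println(in.getString());       // hello world

        // Read the stream back like any other InputStream.
        System.out.println(IOUtils.toString(in)); // hello world
    }
}
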
Example 1
private void setupS3(String fileName, String version, String propertyContent)
		throws UnsupportedEncodingException {
	final S3ObjectId s3ObjectId = new S3ObjectId("bucket1", fileName);
	final GetObjectRequest request = new GetObjectRequest(s3ObjectId);

	final S3Object s3Object = new S3Object();
	s3Object.setObjectContent(new StringInputStream(propertyContent));

	if (version != null) {
		final ObjectMetadata metadata = new ObjectMetadata();
		metadata.setHeader("x-amz-version-id", version);
		s3Object.setObjectMetadata(metadata);
	}

	when(s3Client.getObject(argThat(new GetObjectRequestMatcher(request))))
			.thenReturn(s3Object);
}
 
Example 2
Source Project: cloudml   Source File: Scaler.java    License: GNU Lesser General Public License v3.0
private Deployment cloneCurrentModel(){
    // need to clone the model
    JsonCodec jsonCodec = new JsonCodec();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    jsonCodec.save(currentModel, baos);

    Deployment targetModel = new Deployment();
    try {
        String aString = new String(baos.toByteArray(), "UTF-8");
        InputStream is = new StringInputStream(aString);
        targetModel = (Deployment) jsonCodec.load(is);
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    }
    return targetModel;
}
 
Example 3
@Test
public void parseParameters() throws IOException {
	YAMLParameterFileParser parser = new YAMLParameterFileParser();
	String json = "bar: foo";
	Collection<Parameter> parameters = parser.parseParams(new StringInputStream(json));
	Assertions.assertThat(parameters).containsExactlyInAnyOrder(
			new Parameter()
			.withParameterKey("bar")
			.withParameterValue("foo")
	);
}
 
Example 4
@Test
public void parseParameterCollection() throws IOException {
	YAMLParameterFileParser parser = new YAMLParameterFileParser();
	String json = "bar:\n  - foo1\n  - foo2";
	Collection<Parameter> parameters = parser.parseParams(new StringInputStream(json));
	Assertions.assertThat(parameters).containsExactlyInAnyOrder(
			new Parameter()
					.withParameterKey("bar")
					.withParameterValue("foo1,foo2")
	);
}
 
Example 5
@Test
public void parseParameters() throws IOException {
	JSONParameterFileParser parser = new JSONParameterFileParser();
	String json = "[{\"ParameterKey\": \"bar\", \"ParameterValue\": \"foo\"}]";
	Collection<Parameter> parameters = parser.parseParams(new StringInputStream(json));
	Assertions.assertThat(parameters).containsExactlyInAnyOrder(
			new Parameter()
			.withParameterKey("bar")
			.withParameterValue("foo")
	);
}
 
Example 6
@Test
public void parseKeyParameters() throws IOException {
	JSONParameterFileParser parser = new JSONParameterFileParser();
	String json = "[{\"ParameterKey\": \"bar\", \"UsePreviousValue\": true}]";
	Collection<Parameter> parameters = parser.parseParams(new StringInputStream(json));
	Assertions.assertThat(parameters).containsExactlyInAnyOrder(
			new Parameter()
					.withParameterKey("bar")
					.withUsePreviousValue(true)
	);
}
 
Example 7
@Test
@Override
public void test() throws Exception {
	AmazonS3 amazonS3Client = TestUtils.getPropertyValue(this.s3MessageHandler, "transferManager.s3",
			AmazonS3.class);

	InputStream payload = new StringInputStream("a");
	Message<?> message = MessageBuilder.withPayload(payload)
			.setHeader("key", "myInputStream")
			.build();

	this.channels.input().send(message);

	ArgumentCaptor<PutObjectRequest> putObjectRequestArgumentCaptor =
			ArgumentCaptor.forClass(PutObjectRequest.class);
	verify(amazonS3Client, atLeastOnce()).putObject(putObjectRequestArgumentCaptor.capture());

	PutObjectRequest putObjectRequest = putObjectRequestArgumentCaptor.getValue();
	assertThat(putObjectRequest.getBucketName(), equalTo(S3_BUCKET));
	assertThat(putObjectRequest.getKey(), equalTo("myInputStream"));
	assertNull(putObjectRequest.getFile());
	assertNotNull(putObjectRequest.getInputStream());

	ObjectMetadata metadata = putObjectRequest.getMetadata();
	assertThat(metadata.getContentMD5(), equalTo(Md5Utils.md5AsBase64(payload)));
	assertThat(metadata.getContentLength(), equalTo(1L));
	assertThat(metadata.getContentType(), equalTo(MediaType.APPLICATION_JSON_VALUE));
	assertThat(metadata.getContentDisposition(), equalTo("test.json"));
}
 
Example 8
Source Project: aws-signing-request-interceptor   Source File: SkdSignerUtil.java    License: MIT License
static public String getExpectedAuthorizationHeader(Request request) throws Exception {
    // create the signable request
    DefaultRequest signableRequest = new DefaultRequest(null, request.getServiceName());
    signableRequest.setEndpoint(new URI("http://" + request.getHost()));
    signableRequest.setResourcePath(request.getUri());
    signableRequest.setHttpMethod(HttpMethodName.valueOf(request.getHttpMethod()));
    signableRequest.setContent(new StringInputStream(request.getBody()));
    if (request.getHeaders() != null)
        signableRequest.setHeaders(request.getHeaders());
    if (request.getQueryParams() != null) {
        Map<String, List<String>> convertedQueryParams = new HashMap<>();
        for (String paramName : request.getQueryParams().keySet()) {
            convertedQueryParams.put(paramName, new ArrayList<>(request.getQueryParams().get(paramName)));
        }
        signableRequest.setParameters(convertedQueryParams);
    }

    /*
       Init the signer class

       Note: Double URI encoding is turned off simply because the signature does not match the expected signature of the
       test cases when it is enabled. This was a bit unexpected, because AWSElasticsearchClient (an AWS SDK class) enables
       double URI encoding in the signer by default. Presumably double encoding is needed when accessing the AWS service
       endpoint but not when accessing Elasticsearch directly.
     */
    AWS4Signer aws4Signer = new AWS4Signer(false);
    aws4Signer.setServiceName(request.getServiceName());
    aws4Signer.setRegionName(request.getRegion());
    Method method1 = AWS4Signer.class.getDeclaredMethod("setOverrideDate", Date.class);
    method1.setAccessible(true);
    method1.invoke(aws4Signer, request.getDate());
    aws4Signer.sign(signableRequest, request.getCredentialsProvider().getCredentials());

    return (String) signableRequest.getHeaders().get("Authorization");
}
 
Example 9
Source Project: ache   Source File: StaticFileHandlerFilter.java    License: Apache License 2.0
private void writeResponse(Request request, Response response, String file) {
    OutputStream wrappedOutputStream;
    try {
        response.header("Content-Type", "text/html");
        response.status(200);
        wrappedOutputStream = GzipUtils.checkAndWrap(request.raw(), response.raw(), false);
        IOUtils.copy(new StringInputStream(file), wrappedOutputStream);
        wrappedOutputStream.flush();
        wrappedOutputStream.close();
    } catch (IOException e) {
        throw new RuntimeException("Failed to write HTTP response", e);
    }
}
 
Example 10
private void setRequestPayload(Request<ListVoicesRequest> request, ListVoicesRequest listVoicesRequest) {
    try {
        StringWriter stringWriter = new StringWriter();
        JSONWriter jsonWriter = new JSONWriter(stringWriter);

        jsonWriter.object();
        if (listVoicesRequest.getVoice() != null) {
            Voice voice = listVoicesRequest.getVoice();

            jsonWriter.key(JSON_KEY_VOICE);
            jsonWriter.object();

            if (voice.getGender() != null) {
                jsonWriter.key(JSON_KEY_GENDER).value(voice.getGender());
            }
            if (voice.getLanguage() != null) {
                jsonWriter.key(JSON_KEY_LANGUAGE).value(voice.getLanguage());
            }
            if (voice.getName() != null) {
                jsonWriter.key(JSON_KEY_NAME).value(voice.getName());
            }

            jsonWriter.endObject();
        }
        jsonWriter.endObject();

        String snippet = stringWriter.toString();
        byte[] content = snippet.getBytes(UTF_8);
        request.setContent(new StringInputStream(snippet));
        request.addHeader("Content-Length", Integer.toString(content.length));
    } catch (Throwable t) {
        throw new AmazonClientException("Unable to marshall request to JSON: " + t.getMessage(), t);
    }
}
 
Example 11
Source Project: nifi   Source File: TestFetchS3Object.java    License: Apache License 2.0
@Test
public void testGetObjectVersion() throws IOException {
    runner.setProperty(FetchS3Object.REGION, "us-east-1");
    runner.setProperty(FetchS3Object.BUCKET, "request-bucket");
    runner.setProperty(FetchS3Object.VERSION_ID, "${s3.version}");
    final Map<String, String> attrs = new HashMap<>();
    attrs.put("filename", "request-key");
    attrs.put("s3.version", "request-version");
    runner.enqueue(new byte[0], attrs);

    S3Object s3ObjectResponse = new S3Object();
    s3ObjectResponse.setBucketName("response-bucket-name");
    s3ObjectResponse.setObjectContent(new StringInputStream("Some Content"));
    ObjectMetadata metadata = Mockito.spy(ObjectMetadata.class);
    metadata.setContentDisposition("key/path/to/file.txt");
    Mockito.when(metadata.getVersionId()).thenReturn("response-version");
    s3ObjectResponse.setObjectMetadata(metadata);
    Mockito.when(mockS3Client.getObject(Mockito.any())).thenReturn(s3ObjectResponse);

    runner.run(1);

    ArgumentCaptor<GetObjectRequest> captureRequest = ArgumentCaptor.forClass(GetObjectRequest.class);
    Mockito.verify(mockS3Client, Mockito.times(1)).getObject(captureRequest.capture());
    GetObjectRequest request = captureRequest.getValue();
    assertEquals("request-bucket", request.getBucketName());
    assertEquals("request-key", request.getKey());
    assertEquals("request-version", request.getVersionId());

    runner.assertAllFlowFilesTransferred(FetchS3Object.REL_SUCCESS, 1);
    final List<MockFlowFile> ffs = runner.getFlowFilesForRelationship(FetchS3Object.REL_SUCCESS);
    MockFlowFile ff = ffs.get(0);
    ff.assertAttributeEquals("s3.bucket", "response-bucket-name");
    ff.assertAttributeEquals(CoreAttributes.FILENAME.key(), "file.txt");
    ff.assertAttributeEquals(CoreAttributes.PATH.key(), "key/path/to");
    ff.assertAttributeEquals(CoreAttributes.ABSOLUTE_PATH.key(), "key/path/to/file.txt");
    ff.assertAttributeEquals("s3.version", "response-version");
    ff.assertContentEquals("Some Content");
}
 
Example 12
Source Project: digdag   Source File: S3WaitIT.java    License: Apache License 2.0
@Test
public void testRun()
        throws Exception
{
    String key = UUID.randomUUID().toString();

    Path outfile = folder.newFolder().toPath().resolve("out");

    createProject(projectDir);
    addWorkflow(projectDir, "acceptance/s3/s3_wait.dig");

    Id projectId = TestUtils.pushProject(server.endpoint(), projectDir);

    // Configure AWS credentials
    client.setProjectSecret(projectId, "aws.s3.access_key_id", TEST_S3_ACCESS_KEY_ID);
    client.setProjectSecret(projectId, "aws.s3.secret_access_key", TEST_S3_SECRET_ACCESS_KEY);
    client.setProjectSecret(projectId, "aws.s3.endpoint", TEST_S3_ENDPOINT);

    // Start workflow
    String projectName = projectDir.getFileName().toString();
    Id attemptId = startWorkflow(server.endpoint(), projectName, "s3_wait", ImmutableMap.of(
            "path", bucket + "/" + key,
            "outfile", outfile.toString()
    ));

    // Wait for s3 polling to show up in logs
    expect(Duration.ofSeconds(30), () -> {
        String attemptLogs = TestUtils.getAttemptLogs(client, attemptId);
        return attemptLogs.contains("s3_wait>: " + bucket + "/" + key);
    });

    // Verify that the dependent task has not been executed
    assertThat(Files.exists(outfile), is(false));

    // Verify that the attempt is not yet done
    RestSessionAttempt attempt = client.getSessionAttempt(attemptId);
    assertThat(attempt.getDone(), is(false));

    // Create the file that the workflow is waiting for
    String content = "hello world";
    s3.putObject(bucket, key, new StringInputStream(content), new ObjectMetadata());

    // Expect the attempt to finish and the dependent task to be executed
    expect(Duration.ofMinutes(2), attemptSuccess(server.endpoint(), attemptId));
    assertThat(Files.exists(outfile), is(true));

    JsonNode objectMetadata = MAPPER.readTree(Files.readAllBytes(outfile));
    int contentLength = objectMetadata.get("metadata").get("Content-Length").asInt();
    assertThat(contentLength, is(content.length()));
}
 
Example 13
Source Project: digdag   Source File: BigQueryIT.java    License: Apache License 2.0
@Before
public void setUp()
        throws Exception
{
    assumeThat(GCP_CREDENTIAL, not(isEmptyOrNullString()));

    proxyServer = TestUtils.startRequestFailingProxy(2, new ConcurrentHashMap<>(), HttpResponseStatus.INTERNAL_SERVER_ERROR,
            (req, reqCount) -> {
                // io.digdag.standards.operator.gcp.BqJobRunner sends "CONNECT www.googleapis.com" frequently. It can easily cause infinite retry.
                // So the following custom logic should be used for that kind of requests.
                if (req.getMethod().equals(HttpMethod.CONNECT)) {
                    return Optional.of(reqCount % 5 == 0);
                }
                return Optional.absent();
            });

    server = TemporaryDigdagServer.builder()
            .environment(ImmutableMap.of(
                    "https_proxy", "http://" + proxyServer.getListenAddress().getHostString() + ":" + proxyServer.getListenAddress().getPort())
            )
            .withRandomSecretEncryptionKey()
            .build();
    server.start();

    projectDir = folder.getRoot().toPath();
    createProject(projectDir);
    projectName = projectDir.getFileName().toString();
    projectId = pushProject(server.endpoint(), projectDir, projectName);

    outfile = folder.newFolder().toPath().resolve("outfile");

    digdagClient = DigdagClient.builder()
            .host(server.host())
            .port(server.port())
            .build();

    digdagClient.setProjectSecret(projectId, "gcp.credential", GCP_CREDENTIAL);

    gcpCredential = GoogleCredential.fromStream(new StringInputStream(GCP_CREDENTIAL));

    assertThat(gcpProjectId, not(isEmptyOrNullString()));

    jsonFactory = new JacksonFactory();
    transport = GoogleNetHttpTransport.newTrustedTransport();
    gcs = gcsClient(gcpCredential);
    bq = bqClient(gcpCredential);
}
 
Example 14
Source Project: digdag   Source File: GcsWaitIT.java    License: Apache License 2.0
@Before
public void setUp()
        throws Exception
{
    assumeThat(GCP_CREDENTIAL, not(isEmptyOrNullString()));
    assumeThat(GCS_TEST_BUCKET, not(isEmptyOrNullString()));

    proxyServer = TestUtils.startRequestFailingProxy(1);

    server = TemporaryDigdagServer.builder()
            .environment(ImmutableMap.of(
                    "https_proxy", "http://" + proxyServer.getListenAddress().getHostString() + ":" + proxyServer.getListenAddress().getPort())
            )
            .withRandomSecretEncryptionKey()
            .build();
    server.start();

    projectDir = folder.getRoot().toPath();
    createProject(projectDir);
    projectName = projectDir.getFileName().toString();
    projectId = pushProject(server.endpoint(), projectDir, projectName);

    outfile = folder.newFolder().toPath().resolve("outfile");

    digdagClient = DigdagClient.builder()
            .host(server.host())
            .port(server.port())
            .build();

    digdagClient.setProjectSecret(projectId, "gcp.credential", GCP_CREDENTIAL);

    gcpCredential = GoogleCredential.fromStream(new StringInputStream(GCP_CREDENTIAL));

    gcpProjectId = DigdagClient.objectMapper().readTree(GCP_CREDENTIAL).get("project_id").asText();
    assertThat(gcpProjectId, not(isEmptyOrNullString()));

    jsonFactory = new JacksonFactory();
    transport = GoogleNetHttpTransport.newTrustedTransport();
    gcs = gcsClient(gcpCredential);

    client = DigdagClient.builder()
            .host(server.host())
            .port(server.port())
            .build();
}
 
Example 15
Source Project: nifi   Source File: TestFetchS3Object.java    License: Apache License 2.0
@Test
public void testGetObject() throws IOException {
    runner.setProperty(FetchS3Object.REGION, "us-east-1");
    runner.setProperty(FetchS3Object.BUCKET, "request-bucket");
    final Map<String, String> attrs = new HashMap<>();
    attrs.put("filename", "request-key");
    runner.enqueue(new byte[0], attrs);

    S3Object s3ObjectResponse = new S3Object();
    s3ObjectResponse.setBucketName("response-bucket-name");
    s3ObjectResponse.setKey("response-key");
    s3ObjectResponse.setObjectContent(new StringInputStream("Some Content"));
    ObjectMetadata metadata = Mockito.spy(ObjectMetadata.class);
    metadata.setContentDisposition("key/path/to/file.txt");
    metadata.setContentType("text/plain");
    metadata.setContentMD5("testMD5hash");
    Date expiration = new Date();
    metadata.setExpirationTime(expiration);
    metadata.setExpirationTimeRuleId("testExpirationRuleId");
    Map<String, String> userMetadata = new HashMap<>();
    userMetadata.put("userKey1", "userValue1");
    userMetadata.put("userKey2", "userValue2");
    metadata.setUserMetadata(userMetadata);
    metadata.setSSEAlgorithm("testAlgorithm");
    Mockito.when(metadata.getETag()).thenReturn("test-etag");
    s3ObjectResponse.setObjectMetadata(metadata);
    Mockito.when(mockS3Client.getObject(Mockito.any())).thenReturn(s3ObjectResponse);

    runner.run(1);

    ArgumentCaptor<GetObjectRequest> captureRequest = ArgumentCaptor.forClass(GetObjectRequest.class);
    Mockito.verify(mockS3Client, Mockito.times(1)).getObject(captureRequest.capture());
    GetObjectRequest request = captureRequest.getValue();
    assertEquals("request-bucket", request.getBucketName());
    assertEquals("request-key", request.getKey());
    assertFalse(request.isRequesterPays());
    assertNull(request.getVersionId());

    runner.assertAllFlowFilesTransferred(FetchS3Object.REL_SUCCESS, 1);
    final List<MockFlowFile> ffs = runner.getFlowFilesForRelationship(FetchS3Object.REL_SUCCESS);
    MockFlowFile ff = ffs.get(0);
    ff.assertAttributeEquals("s3.bucket", "response-bucket-name");
    ff.assertAttributeEquals(CoreAttributes.FILENAME.key(), "file.txt");
    ff.assertAttributeEquals(CoreAttributes.PATH.key(), "key/path/to");
    ff.assertAttributeEquals(CoreAttributes.ABSOLUTE_PATH.key(), "key/path/to/file.txt");
    ff.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "text/plain");
    ff.assertAttributeEquals("hash.value", "testMD5hash");
    ff.assertAttributeEquals("hash.algorithm", "MD5");
    ff.assertAttributeEquals("s3.etag", "test-etag");
    ff.assertAttributeEquals("s3.expirationTime", String.valueOf(expiration.getTime()));
    ff.assertAttributeEquals("s3.expirationTimeRuleId", "testExpirationRuleId");
    ff.assertAttributeEquals("userKey1", "userValue1");
    ff.assertAttributeEquals("userKey2", "userValue2");
    ff.assertAttributeEquals("s3.sseAlgorithm", "testAlgorithm");
    ff.assertContentEquals("Some Content");
}
 
Example 16
Source Project: nifi   Source File: TestFetchS3Object.java    License: Apache License 2.0
@Test
public void testGetObjectWithRequesterPays() throws IOException {
    runner.setProperty(FetchS3Object.REGION, "us-east-1");
    runner.setProperty(FetchS3Object.BUCKET, "request-bucket");
    runner.setProperty(FetchS3Object.REQUESTER_PAYS, "true");
    final Map<String, String> attrs = new HashMap<>();
    attrs.put("filename", "request-key");
    runner.enqueue(new byte[0], attrs);

    S3Object s3ObjectResponse = new S3Object();
    s3ObjectResponse.setBucketName("response-bucket-name");
    s3ObjectResponse.setKey("response-key");
    s3ObjectResponse.setObjectContent(new StringInputStream("Some Content"));
    ObjectMetadata metadata = Mockito.spy(ObjectMetadata.class);
    metadata.setContentDisposition("key/path/to/file.txt");
    metadata.setContentType("text/plain");
    metadata.setContentMD5("testMD5hash");
    Date expiration = new Date();
    metadata.setExpirationTime(expiration);
    metadata.setExpirationTimeRuleId("testExpirationRuleId");
    Map<String, String> userMetadata = new HashMap<>();
    userMetadata.put("userKey1", "userValue1");
    userMetadata.put("userKey2", "userValue2");
    metadata.setUserMetadata(userMetadata);
    metadata.setSSEAlgorithm("testAlgorithm");
    Mockito.when(metadata.getETag()).thenReturn("test-etag");
    s3ObjectResponse.setObjectMetadata(metadata);
    Mockito.when(mockS3Client.getObject(Mockito.any())).thenReturn(s3ObjectResponse);

    runner.run(1);

    ArgumentCaptor<GetObjectRequest> captureRequest = ArgumentCaptor.forClass(GetObjectRequest.class);
    Mockito.verify(mockS3Client, Mockito.times(1)).getObject(captureRequest.capture());
    GetObjectRequest request = captureRequest.getValue();
    assertEquals("request-bucket", request.getBucketName());
    assertEquals("request-key", request.getKey());
    assertTrue(request.isRequesterPays());
    assertNull(request.getVersionId());

    runner.assertAllFlowFilesTransferred(FetchS3Object.REL_SUCCESS, 1);
    final List<MockFlowFile> ffs = runner.getFlowFilesForRelationship(FetchS3Object.REL_SUCCESS);
    MockFlowFile ff = ffs.get(0);
    ff.assertAttributeEquals("s3.bucket", "response-bucket-name");
    ff.assertAttributeEquals(CoreAttributes.FILENAME.key(), "file.txt");
    ff.assertAttributeEquals(CoreAttributes.PATH.key(), "key/path/to");
    ff.assertAttributeEquals(CoreAttributes.ABSOLUTE_PATH.key(), "key/path/to/file.txt");
    ff.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "text/plain");
    ff.assertAttributeEquals("hash.value", "testMD5hash");
    ff.assertAttributeEquals("hash.algorithm", "MD5");
    ff.assertAttributeEquals("s3.etag", "test-etag");
    ff.assertAttributeEquals("s3.expirationTime", String.valueOf(expiration.getTime()));
    ff.assertAttributeEquals("s3.expirationTimeRuleId", "testExpirationRuleId");
    ff.assertAttributeEquals("userKey1", "userValue1");
    ff.assertAttributeEquals("userKey2", "userValue2");
    ff.assertAttributeEquals("s3.sseAlgorithm", "testAlgorithm");
    ff.assertContentEquals("Some Content");
}
 
Example 17
Source Project: cloudstack   Source File: DigestHelperTest.java    License: Apache License 2.0
@Test
public void testZeroPaddedDigestMD5() throws Exception {
    inputStream2 = new StringInputStream(INPUT_STRING_NO2);
    String result = DigestHelper.digest(MD5, inputStream2).toString();
    Assert.assertEquals(ZERO_PADDED_MD5_CHECKSUM, result);
}
 
Example 18
Source Project: cloudstack   Source File: DigestHelperTest.java    License: Apache License 2.0
@Test
public void testZeroPaddedDigestSHA256() throws Exception {
    inputStream2 = new StringInputStream(INPUT_STRING_NO3);
    String result = DigestHelper.digest(SHA_256, inputStream2).toString();
    Assert.assertEquals(ZERO_PADDED_SHA256_CHECKSUM, result);
}
 
Example 19
Source Project: cloudstack   Source File: DigestHelperTest.java    License: Apache License 2.0
@BeforeClass
public static void init() throws UnsupportedEncodingException {
    inputStream = new StringInputStream(INPUT_STRING);
}