org.apache.nifi.reporting.InitializationException Java Examples

The following examples show how to use org.apache.nifi.reporting.InitializationException. You can vote up the examples you like or vote down the ones you don't like, and follow the links above each example to go to the original project or source file. You may also check out the related API usage in the sidebar.
Example #1
Source File: StandardProcessorTestRunner.java    From nifi with Apache License 2.0 6 votes vote down vote up
@Override
public void addControllerService(final String identifier, final ControllerService service, final Map<String, String> properties) throws InitializationException {
    // Register a dedicated logger and state manager for this service instance.
    final MockComponentLog serviceLogger = new MockComponentLog(identifier, service);
    controllerServiceLoggers.put(identifier, serviceLogger);

    final MockStateManager stateManager = new MockStateManager(service);
    controllerServiceStateManagers.put(identifier, stateManager);

    // Initialize the service with a mock initialization context that can see
    // the other controller services already registered with this runner.
    final MockControllerServiceInitializationContext initContext = new MockControllerServiceInitializationContext(
            requireNonNull(service), requireNonNull(identifier), serviceLogger, stateManager, kerberosContext);
    initContext.addControllerServices(context);
    service.initialize(initContext);

    // Resolve raw property names into the service's PropertyDescriptors.
    final Map<PropertyDescriptor, String> resolvedProperties = new HashMap<>();
    properties.forEach((name, value) -> resolvedProperties.put(service.getPropertyDescriptor(name), value));

    // Invoke any @OnAdded lifecycle methods; reflection failures abort initialization.
    try {
        ReflectionUtils.invokeMethodsWithAnnotation(OnAdded.class, service);
    } catch (final InvocationTargetException | IllegalAccessException | IllegalArgumentException e) {
        throw new InitializationException(e);
    }

    context.addControllerService(identifier, service, resolvedProperties, null);
}
 
Example #2
Source File: TestPrometheusRecordSink.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Builds a PrometheusRecordSink backed by mocked context/property objects:
// the metrics endpoint port is supplied via a MockPropertyValue and the SSL
// context service is stubbed to null (no TLS). Returns the sink after
// initialize() and onScheduled() have been invoked.
private PrometheusRecordSink initTask() throws InitializationException {

        final ComponentLog logger = mock(ComponentLog.class);
        final PrometheusRecordSink task = new PrometheusRecordSink();
        ConfigurationContext context = mock(ConfigurationContext.class);
        final StateManager stateManager = new MockStateManager(task);

        // Stub the two properties the sink reads during scheduling.
        final PropertyValue pValue = mock(StandardPropertyValue.class);
        when(context.getProperty(PrometheusMetricsUtil.METRICS_ENDPOINT_PORT)).thenReturn(new MockPropertyValue(portString));
        when(context.getProperty(PrometheusRecordSink.SSL_CONTEXT)).thenReturn(pValue);
        when(pValue.asControllerService(SSLContextService.class)).thenReturn(null);

        // Random UUID as the service identifier; the value itself is irrelevant to the test.
        final ControllerServiceInitializationContext initContext = new MockControllerServiceInitializationContext(task, UUID.randomUUID().toString(), logger, stateManager);
        task.initialize(initContext);
        task.onScheduled(context);

        return task;
    }
 
Example #3
Source File: StandardProcessorTestRunner.java    From localization_nifi with Apache License 2.0 6 votes vote down vote up
// Registers a ControllerService with this test runner: wires up a mock logger
// and state manager, initializes the service, resolves raw property names to
// PropertyDescriptors, and fires @OnAdded lifecycle methods before handing the
// service to the mock process context.
@Override
public void addControllerService(final String identifier, final ControllerService service, final Map<String, String> properties) throws InitializationException {
    final MockComponentLog logger = new MockComponentLog(identifier, service);
    controllerServiceLoggers.put(identifier, logger);
    final MockStateManager serviceStateManager = new MockStateManager(service);
    final MockControllerServiceInitializationContext initContext = new MockControllerServiceInitializationContext(requireNonNull(service), requireNonNull(identifier), logger, serviceStateManager);
    controllerServiceStateManagers.put(identifier, serviceStateManager);
    // Let the service resolve references to other services already registered.
    initContext.addControllerServices(context);
    service.initialize(initContext);

    // Map each property name to the descriptor the service declares for it.
    final Map<PropertyDescriptor, String> resolvedProps = new HashMap<>();
    for (final Map.Entry<String, String> entry : properties.entrySet()) {
        resolvedProps.put(service.getPropertyDescriptor(entry.getKey()), entry.getValue());
    }

    // Reflection failures during @OnAdded are surfaced as InitializationException.
    try {
        ReflectionUtils.invokeMethodsWithAnnotation(OnAdded.class, service);
    } catch (final InvocationTargetException | IllegalAccessException | IllegalArgumentException e) {
        throw new InitializationException(e);
    }

    context.addControllerService(identifier, service, resolvedProps, null);
}
 
Example #4
Source File: TestFetchDistributedMapCache.java    From nifi with Apache License 2.0 6 votes vote down vote up
@Test
public void testSingleFlowFile() throws InitializationException, IOException {
    // Seed the cache, then fetch using an EL-resolved key taken from a flow file attribute.
    service.put("key","value", new FetchDistributedMapCache.StringSerializer(), new FetchDistributedMapCache.StringSerializer());
    runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}");

    final String originalContent = "content";
    final Map<String, String> attributes = new HashMap<>();
    attributes.put("cacheKeyAttribute", "key");
    runner.enqueue(originalContent.getBytes("UTF-8"), attributes);

    runner.run();

    // The flow file is routed to success with its content replaced by the cached value.
    runner.assertAllFlowFilesTransferred(FetchDistributedMapCache.REL_SUCCESS, 1);
    runner.assertTransferCount(FetchDistributedMapCache.REL_SUCCESS, 1);

    final MockFlowFile result = runner.getFlowFilesForRelationship(FetchDistributedMapCache.REL_SUCCESS).get(0);
    result.assertContentEquals("value");
    runner.clearTransferState();
}
 
Example #5
Source File: TestWait.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Verifies that a flow file whose release signal has no matching cache entry is
// routed to the wait relationship and stamped with a wait-start timestamp.
@Test
public void testWait() throws InitializationException {
    runner.setProperty(Wait.RELEASE_SIGNAL_IDENTIFIER, "${releaseSignalAttribute}");

    final Map<String, String> props = new HashMap<>();
    props.put("releaseSignalAttribute", "1");
    runner.enqueue(new byte[]{}, props);

    runner.run();

    // no cache key attribute
    runner.assertAllFlowFilesTransferred(Wait.REL_WAIT, 1);
    MockFlowFile ff = runner.getFlowFilesForRelationship(Wait.REL_WAIT).get(0);
    ff.assertAttributeExists(Wait.WAIT_START_TIMESTAMP); // timestamp must be set
    runner.clearTransferState();
}
 
Example #6
Source File: TestPutSQL.java    From nifi with Apache License 2.0 6 votes vote down vote up
@Test
public void testFailInMiddleWithBadParameterValueAndNotSupportTransaction() throws InitializationException, ProcessException, SQLException, IOException {
    // With transactions disabled, a bad parameter value mid-batch must not
    // roll back the statements that succeeded before it.
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    runner.setProperty(PutSQL.SUPPORT_TRANSACTIONS, "false");
    testFailInMiddleWithBadParameterValue(runner);
    runner.run();

    runner.assertTransferCount(PutSQL.REL_SUCCESS, 1);
    runner.assertTransferCount(PutSQL.REL_FAILURE, 1);
    runner.assertTransferCount(PutSQL.REL_RETRY, 2);

    // Only the first (valid) insert should have reached the database.
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS_AI");
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertEquals("Mark", rs.getString(2));
        assertEquals(84, rs.getInt(3));
        assertFalse(rs.next());
    }
}
 
Example #7
Source File: TestPutHBaseJSON.java    From localization_nifi with Apache License 2.0 6 votes vote down vote up
// A flow file containing a JSON array (multiple documents) is not supported by
// PutHBaseJSON: the whole flow file must route to failure untouched, with no
// provenance events and no puts sent to the HBase client.
@Test
public void testMultipleJsonDocsRouteToFailure() throws IOException, InitializationException {
    final TestRunner runner = getTestRunner(DEFAULT_TABLE_NAME, DEFAULT_COLUMN_FAMILY, "1");
    final MockHBaseClientService hBaseClient = getHBaseClientService(runner);
    runner.setProperty(PutHBaseJSON.ROW_ID, DEFAULT_ROW);

    final String content1 = "{ \"field1\" : \"value1\", \"field2\" : \"value2\" }";
    final String content2 = "{ \"field3\" : \"value3\", \"field4\" : \"value4\" }";
    final String content = "[ " + content1 + " , " + content2 + " ]";

    runner.enqueue(content.getBytes(StandardCharsets.UTF_8));
    runner.run();
    runner.assertAllFlowFilesTransferred(PutHBaseCell.REL_FAILURE, 1);

    // Original content must be preserved on the failure relationship.
    final MockFlowFile outFile = runner.getFlowFilesForRelationship(PutHBaseCell.REL_FAILURE).get(0);
    outFile.assertContentEquals(content);

    // should be no provenance events
    assertEquals(0, runner.getProvenanceEvents().size());

    // no puts should have made it to the client
    assertEquals(0, hBaseClient.getFlowFilePuts().size());
}
 
Example #8
Source File: TestXMLRecordSetWriter.java    From nifi with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that with MULTIPLE_RECORDS=false the writer emits a single
 * record element, comparing the produced XML against the expected document
 * (whitespace-insensitive, element name+text matching).
 */
@Test
public void testDefaultSingleRecord() throws IOException, InitializationException {
    XMLRecordSetWriter writer = new XMLRecordSetWriter();
    TestRunner runner = setup(writer);

    runner.setProperty(TestXMLRecordSetWriterProcessor.MULTIPLE_RECORDS, "false");

    runner.enableControllerService(writer);
    runner.enqueue("");
    runner.run();
    runner.assertQueueEmpty();
    runner.assertAllFlowFilesTransferred(TestXMLRecordSetWriterProcessor.SUCCESS, 1);

    String expected = "<array_record><array_field>1</array_field><array_field></array_field><array_field>3</array_field>" +
            "<name1>val1</name1><name2></name2></array_record>";

    String actual = new String(runner.getContentAsByteArray(runner.getFlowFilesForRelationship(TestXMLRecordSetWriterProcessor.SUCCESS).get(0)));
    // FIX: assertThat takes (actual, matcher) — the original call had the arguments
    // reversed, which produces misleading failure messages. Compare the actual
    // output against a matcher built from the expected XML.
    assertThat(actual, CompareMatcher.isSimilarTo(expected).ignoreWhitespace().withNodeMatcher(new DefaultNodeMatcher(ElementSelectors.byNameAndText)));
}
 
Example #9
Source File: TestSelectHive_1_1QL.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Runs the query with expression language enabled and checks the exact sequence
// of provenance events: FORK, then FETCH (with the Derby transit URI), then FORK.
@Test
public void testNoTimeLimit() throws InitializationException, ClassNotFoundException, SQLException, IOException {
    invokeOnTrigger(QUERY_WITH_EL, true, "Avro");

    final List<ProvenanceEventRecord> provenanceEvents = runner.getProvenanceEvents();
    assertEquals(3, provenanceEvents.size());

    final ProvenanceEventRecord provenance0 = provenanceEvents.get(0);
    assertEquals(ProvenanceEventType.FORK, provenance0.getEventType());

    // The FETCH event must record where the data came from.
    final ProvenanceEventRecord provenance1 = provenanceEvents.get(1);
    assertEquals(ProvenanceEventType.FETCH, provenance1.getEventType());
    assertEquals("jdbc:derby:target/db;create=true", provenance1.getTransitUri());

    final ProvenanceEventRecord provenance2 = provenanceEvents.get(2);
    assertEquals(ProvenanceEventType.FORK, provenance2.getEventType());
}
 
Example #10
Source File: HBase_1_1_2_ClientService.java    From localization_nifi with Apache License 2.0 6 votes vote down vote up
// Service initialization: captures the Kerberos configuration from the init
// context and builds the immutable list of supported property descriptors.
// Descriptor order here determines display order in the UI.
@Override
protected void init(ControllerServiceInitializationContext config) throws InitializationException {
    kerberosConfigFile = config.getKerberosConfigurationFile();
    kerberosProperties = getKerberosProperties(kerberosConfigFile);

    List<PropertyDescriptor> props = new ArrayList<>();
    props.add(HADOOP_CONF_FILES);
    props.add(kerberosProperties.getKerberosPrincipal());
    props.add(kerberosProperties.getKerberosKeytab());
    props.add(ZOOKEEPER_QUORUM);
    props.add(ZOOKEEPER_CLIENT_PORT);
    props.add(ZOOKEEPER_ZNODE_PARENT);
    props.add(HBASE_CLIENT_RETRIES);
    props.add(PHOENIX_CLIENT_JAR_LOCATION);
    // Expose an unmodifiable view so callers cannot mutate the descriptor list.
    this.properties = Collections.unmodifiableList(props);
}
 
Example #11
Source File: QuerySolrIT.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Verifies that when an SSLContextService is configured, scheduling the
// processor creates a Solr client exactly once for the configured location.
@Test
public void testSslContextService() throws IOException, InitializationException {
    // Mockito mock so createSolrClient invocations can be verified.
    final QuerySolr proc = Mockito.mock(QuerySolr.class);
    TestRunner runner = TestRunners.newTestRunner(proc);
    runner.setProperty(SolrUtils.SOLR_TYPE, SolrUtils.SOLR_TYPE_CLOUD.getValue());
    runner.setProperty(SolrUtils.SOLR_LOCATION, SOLR_LOCATION);
    runner.setProperty(SolrUtils.COLLECTION, SOLR_COLLECTION);

    final SSLContextService sslContextService = new MockSSLContextService();
    runner.addControllerService("ssl-context", sslContextService);
    runner.enableControllerService(sslContextService);

    runner.setProperty(SolrUtils.SSL_CONTEXT_SERVICE, "ssl-context");
    proc.onScheduled(runner.getProcessContext());
    Mockito.verify(proc, Mockito.times(1)).createSolrClient(Mockito.any(ProcessContext.class), Mockito.eq(SOLR_LOCATION));

}
 
Example #12
Source File: PutParquetTest.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Forces the final rename step to throw FailureException via an anonymous
// subclass and asserts the flow file routes to failure, with the temporary
// dot-file cleaned up afterwards.
@Test
public void testFailureExceptionRenamingShouldRouteToFailure() throws InitializationException, IOException {
    final PutParquet proc = new PutParquet() {
        @Override
        protected void rename(FileSystem fileSystem, Path srcFile, Path destFile)
                throws IOException, InterruptedException, FailureException {
            throw new FailureException("FailureException renaming");
        }
    };

    configure(proc, 10);

    // Unique filename per run so repeated executions don't collide on disk.
    final String filename = "testFailureExceptionRenamingShouldRouteToFailure-" + System.currentTimeMillis();

    final Map<String,String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put(CoreAttributes.FILENAME.key(), filename);

    testRunner.enqueue("trigger", flowFileAttributes);
    testRunner.run();
    testRunner.assertAllFlowFilesTransferred(PutParquet.REL_FAILURE, 1);

    // verify we don't have the temp dot file after success
    final File tempAvroParquetFile = new File(DIRECTORY + "/." + filename);
    Assert.assertFalse(tempAvroParquetFile.exists());
}
 
Example #13
Source File: TestPutDistributedMapCache.java    From localization_nifi with Apache License 2.0 6 votes vote down vote up
@Test
public void testSingleFlowFile() throws InitializationException, IOException {
    // Cache key comes from a flow file attribute via expression language.
    runner.setProperty(PutDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}");

    final String content = "content";
    final Map<String, String> attributes = new HashMap<>();
    attributes.put("cacheKeyAttribute", "1");
    runner.enqueue(content.getBytes("UTF-8"), attributes);

    runner.run();

    runner.assertAllFlowFilesTransferred(PutDistributedMapCache.REL_SUCCESS, 1);
    runner.assertTransferCount(PutDistributedMapCache.REL_SUCCESS, 1);

    // The flow file content must now be stored in the cache under key "1".
    byte[] cached = service.get("1", new PutDistributedMapCache.StringSerializer(), new PutDistributedMapCache.CacheValueDeserializer());
    assertEquals(content, new String(cached, "UTF-8"));

    // The original flow file passes through unchanged, marked as cached.
    final MockFlowFile result = runner.getFlowFilesForRelationship(PutDistributedMapCache.REL_SUCCESS).get(0);
    result.assertAttributeEquals("cached", "true");
    result.assertContentEquals(content);
    runner.clearTransferState();
}
 
Example #14
Source File: TestPutSolrRecord.java    From nifi with Apache License 2.0 6 votes vote down vote up
// A plain http:// Solr location must make the processor invalid once an
// SSLContextService is attached: TLS config only makes sense with https.
@Test
public void testHttpUrlShouldNotAllowSSLContext() throws InitializationException {
    final TestRunner runner = TestRunners.newTestRunner(PutSolrRecord.class);
    MockRecordParser recordParser = new MockRecordParser();
    recordParser.addRecord(1, "Abhinav","R",8,"Chemistry","term1", 98);
    runner.addControllerService("parser", recordParser);
    runner.enableControllerService(recordParser);
    runner.setProperty(PutSolrRecord.RECORD_READER, "parser");

    // Valid while only the http location is configured...
    runner.setProperty(SolrUtils.SOLR_TYPE, SolrUtils.SOLR_TYPE_STANDARD.getValue());
    runner.setProperty(SolrUtils.SOLR_LOCATION, "http://localhost:8443/solr");
    runner.assertValid();

    final SSLContextService sslContextService = new MockSSLContextService();
    runner.addControllerService("ssl-context", sslContextService);
    runner.enableControllerService(sslContextService);

    // ...but invalid as soon as an SSL context service is referenced.
    runner.setProperty(SolrUtils.SSL_CONTEXT_SERVICE, "ssl-context");
    runner.assertNotValid();
}
 
Example #15
Source File: TestFetchDistributedMapCache.java    From localization_nifi with Apache License 2.0 6 votes vote down vote up
// Like the basic fetch test, but the cached value is written to the "test"
// attribute instead of replacing the flow file content.
@Test
public void testSingleFlowFileToAttribute() throws InitializationException, IOException {
    service.put("key","value", new FetchDistributedMapCache.StringSerializer(), new FetchDistributedMapCache.StringSerializer());
    runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}");
    runner.setProperty(FetchDistributedMapCache.PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE, "test");

    final Map<String, String> props = new HashMap<>();
    props.put("cacheKeyAttribute", "key");
    String flowFileContent = "content";
    runner.enqueue(flowFileContent.getBytes("UTF-8"), props);

    runner.run();

    runner.assertAllFlowFilesTransferred(FetchDistributedMapCache.REL_SUCCESS, 1);
    runner.assertTransferCount(FetchDistributedMapCache.REL_SUCCESS, 1);

    // Cached value lands in the attribute; content is left untouched.
    final MockFlowFile outputFlowFile = runner.getFlowFilesForRelationship(FetchDistributedMapCache.REL_SUCCESS).get(0);
    outputFlowFile.assertAttributeEquals("test","value");
    runner.clearTransferState();

}
 
Example #16
Source File: TestAzureLogAnalyticsReportingTask.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Triggers the reporting task and verifies the outgoing POST carries the
// Azure Log Analytics auth headers, with the Authorization header using the
// SharedKey scheme.
@Test
public void testAuthorization() throws IOException, InterruptedException, InitializationException {

    reportingContextStub.setProperty(AzureLogAnalyticsReportingTask.SEND_JVM_METRICS.getName(), "true");
    testedReportingTask.initialize(reportingInitContextStub);
    reportingContextStub.getEventAccess().setProcessGroupStatus(rootGroupStatus);
    testedReportingTask.onTrigger(reportingContextStub);

    // Capture the header values added to the mocked HttpPost.
    HttpPost postRequest = testedReportingTask.getPostRequest();
    ArgumentCaptor<String> captorAuthorization = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> captorXMsDate = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> captorTimeGeneratedField = ArgumentCaptor.forClass(String.class);
    verify(postRequest, atLeast(1)).addHeader( eq("Authorization"), captorAuthorization.capture());
    verify(postRequest, atLeast(1)).addHeader( eq("x-ms-date"), captorXMsDate.capture());
    verify(postRequest, atLeast(1)).addHeader( eq("time-generated-field"), captorTimeGeneratedField.capture());
    assertTrue(captorAuthorization.getValue().contains("SharedKey"));
}
 
Example #17
Source File: TestPutDistributedMapCache.java    From nifi with Apache License 2.0 6 votes vote down vote up
@Test
public void testMaxCacheEntrySize() throws InitializationException, IOException {
    // Cap cache entries at 10 bytes so the 20-byte payload below is rejected.
    runner.setProperty(PutDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "${uuid}");
    runner.setProperty(PutDistributedMapCache.CACHE_ENTRY_MAX_BYTES, "10 B");

    // max length is 10 bytes, flow file content is 20 bytes
    final String oversizedContent = "contentwhichistoobig";
    runner.enqueue(oversizedContent.getBytes("UTF-8"));

    runner.run();

    // no cache key attribute
    runner.assertAllFlowFilesTransferred(PutDistributedMapCache.REL_FAILURE, 1);
    runner.assertTransferCount(PutDistributedMapCache.REL_FAILURE, 1);

    // The rejected flow file keeps its content and gains no "cached" attribute.
    final MockFlowFile rejected = runner.getFlowFilesForRelationship(PutDistributedMapCache.REL_FAILURE).get(0);
    rejected.assertAttributeNotExists("cached");
    rejected.assertContentEquals(oversizedContent);

    // Restore a generous limit so later tests using this runner are unaffected.
    runner.clearTransferState();
    runner.setProperty(PutDistributedMapCache.CACHE_ENTRY_MAX_BYTES, "1 MB");
}
 
Example #18
Source File: TestSiteToSiteReportingRecordSink.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Builds a MockSiteToSiteReportingRecordSink wired to a mocked configuration
// context whose record-writer property resolves to a MockRecordWriter.
// customProperties is accepted for call-site symmetry; this variant does not
// apply it — NOTE(review): confirm that is intentional.
public MockSiteToSiteReportingRecordSink initTask(Map<PropertyDescriptor, String> customProperties) throws InitializationException, IOException {

        final MockSiteToSiteReportingRecordSink task = new MockSiteToSiteReportingRecordSink();
        context = Mockito.mock(ConfigurationContext.class);
        StateManager stateManager = new MockStateManager(task);

        final PropertyValue pValue = Mockito.mock(StandardPropertyValue.class);
        MockRecordWriter writer = new MockRecordWriter(null, false); // No header, don't quote values
        Mockito.when(context.getProperty(RecordSinkService.RECORD_WRITER_FACTORY)).thenReturn(pValue);
        Mockito.when(pValue.asControllerService(RecordSetWriterFactory.class)).thenReturn(writer);

        final ComponentLog logger = Mockito.mock(ComponentLog.class);
        final ControllerServiceInitializationContext initContext = new MockControllerServiceInitializationContext(writer, UUID.randomUUID().toString(), logger, stateManager);
        task.initialize(initContext);

        return task;
    }
 
Example #19
Source File: TestActionHandlerLookup.java    From nifi with Apache License 2.0 6 votes vote down vote up
@Before
public void setup() throws InitializationException {
    // Two mock handlers registered under distinct identifiers, plus a lookup
    // service that dispatches to them by key.
    alertHandler = new MockPropertyActionHandler();
    logHandler = new MockPropertyActionHandler();
    actionHandlerLookup = new ActionHandlerLookup();

    runner = TestRunners.newTestRunner(TestProcessor.class);

    final String alertId = "alert-handler";
    final String logId = "log-handler";
    runner.addControllerService(alertId, alertHandler);
    runner.addControllerService(logId, logHandler);
    runner.addControllerService("action-handler-lookup", actionHandlerLookup);

    // Map lookup keys to the registered handler identifiers before enabling.
    runner.setProperty(actionHandlerLookup, "ALERT", alertId);
    runner.setProperty(actionHandlerLookup, "LOG", logId);

    // Dependent services must be enabled before the lookup that references them.
    runner.enableControllerService(alertHandler);
    runner.enableControllerService(logHandler);
    runner.enableControllerService(actionHandlerLookup);

}
 
Example #20
Source File: TestConvertJSONToSQL.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Converts a JSON document containing a boolean into an INSERT statement,
// asserting the boolean maps to INTEGER type with value "1" in the sql.args
// attributes and that the generated SQL is parameterized.
@Test
public void testInsertBoolToInteger() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(ConvertJSONToSQL.class);

    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(ConvertJSONToSQL.CONNECTION_POOL, "dbcp");
    runner.setProperty(ConvertJSONToSQL.TABLE_NAME, "PERSONS");
    runner.setProperty(ConvertJSONToSQL.STATEMENT_TYPE, "INSERT");
    runner.enqueue(Paths.get("src/test/resources/TestConvertJSONToSQL/person-with-bool.json"));
    runner.run();

    runner.assertTransferCount(ConvertJSONToSQL.REL_ORIGINAL, 1);
    runner.getFlowFilesForRelationship(ConvertJSONToSQL.REL_ORIGINAL).get(0).assertAttributeEquals(FRAGMENT_COUNT.key(), "1");
    runner.assertTransferCount(ConvertJSONToSQL.REL_SQL, 1);
    final MockFlowFile out = runner.getFlowFilesForRelationship(ConvertJSONToSQL.REL_SQL).get(0);
    // Boolean true is coerced to INTEGER 1 for the ID column.
    out.assertAttributeEquals("sql.args.1.type", String.valueOf(java.sql.Types.INTEGER));
    out.assertAttributeEquals("sql.args.1.value", "1");
    out.assertAttributeEquals("sql.args.2.type", String.valueOf(java.sql.Types.VARCHAR));
    out.assertAttributeEquals("sql.args.2.value", "Bool");
    out.assertAttributeEquals("sql.args.3.type", String.valueOf(java.sql.Types.INTEGER));
    out.assertAttributeEquals("sql.args.3.value", "1");

    out.assertContentEquals("INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?)");
}
 
Example #21
Source File: TestConvertJSONToSQL.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Converts a JSON document into a parameterized DELETE statement keyed on all
// of the document's fields, checking the sql.args attributes and SQL text.
@Test
public void testDelete() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(ConvertJSONToSQL.class);
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);

    runner.setProperty(ConvertJSONToSQL.CONNECTION_POOL, "dbcp");
    runner.setProperty(ConvertJSONToSQL.TABLE_NAME, "PERSONS");
    runner.setProperty(ConvertJSONToSQL.STATEMENT_TYPE, "DELETE");
    runner.enqueue(Paths.get("src/test/resources/TestConvertJSONToSQL/person-1.json"));
    runner.run();

    runner.assertTransferCount(ConvertJSONToSQL.REL_ORIGINAL, 1);
    runner.getFlowFilesForRelationship(ConvertJSONToSQL.REL_ORIGINAL).get(0).assertAttributeEquals(FRAGMENT_COUNT.key(), "1");
    runner.assertTransferCount(ConvertJSONToSQL.REL_SQL, 1);
    final MockFlowFile out = runner.getFlowFilesForRelationship(ConvertJSONToSQL.REL_SQL).get(0);
    // Each JSON field becomes a typed positional argument in the WHERE clause.
    out.assertAttributeEquals("sql.args.1.type", String.valueOf(java.sql.Types.INTEGER));
    out.assertAttributeEquals("sql.args.1.value", "1");
    out.assertAttributeEquals("sql.args.2.type", String.valueOf(java.sql.Types.VARCHAR));
    out.assertAttributeEquals("sql.args.2.value", "Mark");
    out.assertAttributeEquals("sql.args.3.type", String.valueOf(java.sql.Types.INTEGER));
    out.assertAttributeEquals("sql.args.3.value", "48");

    out.assertContentEquals("DELETE FROM PERSONS WHERE ID = ? AND NAME = ? AND CODE = ?");
}
 
Example #22
Source File: DBCPServiceTest.java    From nifi with Apache License 2.0 6 votes vote down vote up
/**
 * Test get database connection using Derby: repeatedly borrowing and
 * immediately releasing pooled connections must never exhaust the pool.
 */
@Test
public void testGetManyNormal() throws InitializationException, SQLException {
    final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
    final DBCPConnectionPool service = new DBCPConnectionPool();
    runner.addControllerService("test-exhaust", service);

    // Embedded Derby connection settings.
    runner.setProperty(service, DBCPConnectionPool.DATABASE_URL, "jdbc:derby:" + dbLocation + ";create=true");
    runner.setProperty(service, DBCPConnectionPool.DB_USER, "tester");
    runner.setProperty(service, DBCPConnectionPool.DB_PASSWORD, "testerp");
    runner.setProperty(service, DBCPConnectionPool.DB_DRIVERNAME, "org.apache.derby.jdbc.EmbeddedDriver");

    runner.enableControllerService(service);
    runner.assertValid(service);

    final DBCPService dbcpService = (DBCPService) runner.getProcessContext().getControllerServiceLookup().getControllerService("test-exhaust");
    Assert.assertNotNull(dbcpService);

    // Borrow and return a connection many times; close() hands it back to the pool.
    for (int attempt = 0; attempt < 1000; attempt++) {
        try (final Connection connection = dbcpService.getConnection()) {
            Assert.assertNotNull(connection);
        }
    }
}
 
Example #23
Source File: FetchParquetTest.java    From nifi with Apache License 2.0 6 votes vote down vote up
@Test
public void testFetchParquetWithDecimal() throws InitializationException, IOException {
    configure(proc);

    // Write a parquet file containing users with decimal fields.
    final File targetDir = new File(DIRECTORY);
    final File targetFile = new File(targetDir, "testFetchParquetWithDecimal.parquet");
    final int userCount = 10;
    writeParquetUsersWithDecimal(targetFile, userCount);

    // Point the processor at the file via the standard path/filename attributes.
    final Map<String, String> attributes = new HashMap<>();
    attributes.put(CoreAttributes.PATH.key(), targetDir.getAbsolutePath());
    attributes.put(CoreAttributes.FILENAME.key(), targetFile.getName());

    testRunner.enqueue("TRIGGER", attributes);
    testRunner.run();
    testRunner.assertAllFlowFilesTransferred(FetchParquet.REL_SUCCESS, 1);
}
 
Example #24
Source File: FetchParquetTest.java    From nifi with Apache License 2.0 6 votes vote down vote up
// Writes a parquet file whose user records contain an array field, then
// verifies FetchParquet reads it and routes the flow file to success.
@Test
public void testFetchWithArray() throws InitializationException, IOException {
    configure(proc);

    final File parquetDir = new File(DIRECTORY);
    final File parquetFile = new File(parquetDir,"testFetchParquetWithArrayToCSV.parquet");
    final int numUsers = 10;
    writeParquetUsersWithArray(parquetFile, numUsers);

    // Standard NiFi path/filename attributes tell the processor what to fetch.
    final Map<String,String> attributes = new HashMap<>();
    attributes.put(CoreAttributes.PATH.key(), parquetDir.getAbsolutePath());
    attributes.put(CoreAttributes.FILENAME.key(), parquetFile.getName());

    testRunner.enqueue("TRIGGER", attributes);
    testRunner.run();
    testRunner.assertAllFlowFilesTransferred(FetchParquet.REL_SUCCESS, 1);
}
 
Example #25
Source File: TestPutSQL.java    From nifi with Apache License 2.0 5 votes vote down vote up
// Configures the connection pool and queues four SQL statements, the second
// of which is deliberately malformed, so callers can assert mid-batch
// failure behavior.
private void testFailInMiddleWithBadStatement(final TestRunner runner) throws InitializationException {
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.OBTAIN_GENERATED_KEYS, "false");
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    final String[] statements = {
            "INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Mark', 84)",
            "INSERT INTO PERSONS_AI", // intentionally wrong syntax
            "INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Tom', 3)",
            "INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Harry', 44)"
    };
    for (final String sql : statements) {
        runner.enqueue(sql.getBytes());
    }
}
 
Example #26
Source File: TestGetHBase.java    From nifi with Apache License 2.0 5 votes vote down vote up
// Verifies that GetHBase persists its progress to a local state file and,
// after the processor is recreated from that file, does not re-emit rows it
// already processed.
@Test
public void testPersistAndRecoverFromLocalState() throws InitializationException {
    // Start from a clean state file so no earlier run influences this test.
    final File stateFile = new File("target/test-recover-state.bin");
    if (!stateFile.delete() && stateFile.exists()) {
        Assert.fail("Could not delete state file " + stateFile);
    }
    proc.setStateFile(stateFile);

    final long now = System.currentTimeMillis();

    final Map<String, String> cells = new HashMap<>();
    cells.put("greeting", "hello");
    cells.put("name", "nifi");

    // Four rows with timestamps at or before "now".
    hBaseClient.addResult("row0", cells, now - 2);
    hBaseClient.addResult("row1", cells, now - 1);
    hBaseClient.addResult("row2", cells, now - 1);
    hBaseClient.addResult("row3", cells, now);

    runner.run(100);
    runner.assertAllFlowFilesTransferred(GetHBase.REL_SUCCESS, 4);

    // A newer row is picked up on a subsequent run.
    hBaseClient.addResult("row4", cells, now + 1);
    runner.run();
    runner.assertAllFlowFilesTransferred(GetHBase.REL_SUCCESS, 5);
    runner.clearTransferState();

    // Recreate the processor from the persisted state file.
    proc = new MockGetHBase(stateFile);

    // Re-adding the already-seen rows must produce no new flow files.
    hBaseClient.addResult("row0", cells, now - 2);
    hBaseClient.addResult("row1", cells, now - 1);
    hBaseClient.addResult("row2", cells, now - 1);
    hBaseClient.addResult("row3", cells, now);

    runner.run(100);
    runner.assertAllFlowFilesTransferred(GetHBase.REL_SUCCESS, 0);
}
 
Example #27
Source File: TestPutHBaseCell.java    From nifi with Apache License 2.0 5 votes vote down vote up
// Registers, enables, and wires a mock HBase client service into the runner,
// returning it so tests can inspect the puts it receives.
private MockHBaseClientService getHBaseClientService(TestRunner runner) throws InitializationException {
    final MockHBaseClientService client = new MockHBaseClientService();
    runner.addControllerService("hbaseClient", client);
    runner.enableControllerService(client);
    runner.setProperty(PutHBaseCell.HBASE_CLIENT_SERVICE, "hbaseClient");
    return client;
}
 
Example #28
Source File: PutBigQueryBatchIT.java    From nifi with Apache License 2.0 5 votes vote down vote up
// Loads a multi-megabyte newline-delimited JSON file into BigQuery in a single
// batch and asserts the whole payload routes to success with no service
// exception attribute.
// NOTE(review): method name should be lowerCamelCase per Java convention;
// left unchanged to preserve the test's external identity.
@Test
public void PutBigQueryBatchLargePayloadTest() throws InitializationException, IOException {
    // Use the test method's own name as the destination table name.
    String methodName = Thread.currentThread().getStackTrace()[1].getMethodName();
    runner = setCredentialsControllerService(runner);
    runner.setProperty(AbstractGCPProcessor.GCP_CREDENTIALS_PROVIDER_SERVICE, CONTROLLER_SERVICE);
    runner.setProperty(BigQueryAttributes.DATASET_ATTR, dataset.getDatasetId().getDataset());
    runner.setProperty(BigQueryAttributes.TABLE_NAME_ATTR, methodName);
    runner.setProperty(BigQueryAttributes.SOURCE_TYPE_ATTR, FormatOptions.json().getType());
    runner.setProperty(BigQueryAttributes.TABLE_SCHEMA_ATTR, TABLE_SCHEMA_STRING);

    // Allow one bad record to deal with the extra line break.
    runner.setProperty(BigQueryAttributes.MAX_BADRECORDS_ATTR, String.valueOf(1));

    String str = "{\"field_1\":\"Daniel is great\",\"field_2\":\"Here's to the crazy ones. The misfits. The rebels. The troublemakers." +
            " The round pegs in the square holes. The ones who see things differently. They're not fond of rules. And they have no respect" +
            " for the status quo. You can quote them, disagree with them, glorify or vilify them. About the only thing you can't do is ignore" +
            " them. Because they change things. They push the human race forward. And while some may see them as the crazy ones, we see genius." +
            " Because the people who are crazy enough to think they can change the world, are the ones who do.\"}\n";
    // Write 2,000,000 copies of the record to a temp file to build a large payload.
    Path tempFile = Files.createTempFile(methodName, "");
    try (BufferedWriter writer = Files.newBufferedWriter(tempFile)) {

        for (int i = 0; i < 2; i++) {
            for (int ii = 0; ii < 1_000_000; ii++) {
                writer.write(str);
            }
            writer.flush();
        }
        writer.flush();
    }

    runner.enqueue(tempFile);
    runner.run(1);
    for (MockFlowFile flowFile : runner.getFlowFilesForRelationship(AbstractBigQueryProcessor.REL_SUCCESS)) {
        validateNoServiceExceptionAttribute(flowFile);
    }
    runner.assertAllFlowFilesTransferred(AbstractBigQueryProcessor.REL_SUCCESS, 1);
}
 
Example #29
Source File: TestHBase_1_1_2_ClientMapCacheService.java    From nifi with Apache License 2.0 5 votes vote down vote up
// Puts a key/value pair through the HBase-backed atomic map cache service and
// fetches it back, verifying key, value, and that the revision equals the
// stored value's bytes.
@Test
public void testFetch() throws InitializationException, IOException {
    final String key = "key1";
    final String value = "value1";
    final byte[] revision = value.getBytes();

    final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);

    // Mock an HBase Table so we can verify the put operations later
    final Table table = Mockito.mock(Table.class);
    when(table.getName()).thenReturn(TableName.valueOf(tableName));

    // create the controller service and link it to the test processor
    final MockHBaseClientService service = configureHBaseClientService(runner, table);
    runner.assertValid(service);

    final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
            .asControllerService(HBaseClientService.class);

    // Layer the map-cache service on top of the HBase client service.
    final AtomicDistributedMapCacheClient<byte[]> cacheService = configureHBaseCacheService(runner, hBaseClientService);
    runner.assertValid(cacheService);

    final AtomicDistributedMapCacheClient<byte[]> hBaseCacheService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CACHE_SERVICE)
            .asControllerService(AtomicDistributedMapCacheClient.class);

    hBaseCacheService.put(key, value, stringSerializer, stringSerializer);

    final AtomicCacheEntry<String, String, byte[]> atomicCacheEntry = hBaseCacheService.fetch(key, stringSerializer, stringDeserializer);

    assertEquals(key, atomicCacheEntry.getKey());
    assertEquals(value, atomicCacheEntry.getValue());
    assertArrayEquals(revision, atomicCacheEntry.getRevision().get());
}
 
Example #30
Source File: TestAzureLogAnalyticsReportingTask.java    From nifi with Apache License 2.0 5 votes vote down vote up
// Scopes the reporting task to a single process group ID and checks that the
// collected metrics match what TestVerification expects for that group.
@Test
public void testOnTriggerWithOnePG() throws IOException, InterruptedException, InitializationException {
    initTestGroupStatuses();
    // Restrict reporting to TEST_GROUP1_ID only.
    reportingContextStub.setProperty(AzureLogAnalyticsReportingTask.PROCESS_GROUP_IDS.getName(), TEST_GROUP1_ID);
    testedReportingTask.initialize(reportingInitContextStub);
    reportingContextStub.getEventAccess().setProcessGroupStatus(rootGroupStatus);
    reportingContextStub.getEventAccess().setProcessGroupStatus(TEST_GROUP1_ID, testGroupStatus);
    testedReportingTask.onTrigger(reportingContextStub);

    List<Metric> collectedMetrics = testedReportingTask.getMetricsCollected();
    TestVerification.assertDatatFlowMetrics(collectedMetrics);
}