org.apache.nifi.processor.ProcessSession Java Examples

The following examples show how to use org.apache.nifi.processor.ProcessSession. They are drawn from open source projects; each example's header lists the source file, the project it comes from, and its license.
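
Before the examples, a minimal sketch of the typical ProcessSession lifecycle may help frame what follows. This is illustrative only, not taken from any of the projects below; REL_SUCCESS stands in for whatever relationship the processor defines.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // Pull one FlowFile from the incoming queue; null means there is nothing to do.
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    // FlowFiles are immutable: every mutation returns a new reference,
    // and only the returned reference carries the change.
    flowFile = session.putAttribute(flowFile, "example.processed", "true");

    // Route the FlowFile. AbstractProcessor commits the session automatically after
    // onTrigger returns; processors that manage their own sessions call
    // session.commit() explicitly, as several examples below do.
    session.transfer(flowFile, REL_SUCCESS);
}
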
Example #1
Source File: RouteText.java    From nifi with Apache License 2.0
private void appendLine(final ProcessSession session, final Map<Relationship, Map<Group, FlowFile>> flowFileMap, final Relationship relationship,
    final FlowFile original, final String line, final Charset charset, final Group group) {

    final Map<Group, FlowFile> groupToFlowFileMap = flowFileMap.computeIfAbsent(relationship, k -> new HashMap<>());

    FlowFile flowFile = groupToFlowFileMap.get(group);
    if (flowFile == null) {
        flowFile = session.create(original);
    }

    flowFile = session.append(flowFile, new OutputStreamCallback() {
        @Override
        public void process(final OutputStream out) throws IOException {
            out.write(line.getBytes(charset));
        }
    });

    groupToFlowFileMap.put(group, flowFile);
}
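
A note on the pattern: using session.append with a per-relationship, per-group map lets RouteText accumulate many matched lines into a single FlowFile per group rather than creating one FlowFile per line, which keeps FlowFile counts and repository overhead down when routing large text files.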
 
Example #2
Source File: TestSplitJson.java    From nifi with Apache License 2.0
@Test
public void testSplit_pathToInputStringNullValue() throws Exception {
    final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
    testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.*");
    ProcessSession session = testRunner.getProcessSessionFactory().createSession();
    FlowFile ff = session.create();
    ff = session.write(ff, new OutputStreamCallback() {
        @Override
        public void process(OutputStream out) throws IOException {
            try (OutputStream outputStream = new BufferedOutputStream(out)) {
                outputStream.write("null".getBytes(StandardCharsets.UTF_8));
            }
        }
    });
    testRunner.enqueue(ff);
    testRunner.run();
    testRunner.assertTransferCount(SplitJson.REL_FAILURE, 1);
}
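
Building the input through an explicit session is only necessary when a test has to exercise ProcessSession methods directly. As a sketch of a shorter equivalent (assuming the byte-array overload of enqueue in the NiFi test harness), the content can be enqueued directly:

testRunner.enqueue("null".getBytes(StandardCharsets.UTF_8));
testRunner.run();
testRunner.assertTransferCount(SplitJson.REL_FAILURE, 1);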
 
Example #3
Source File: ConsumeWindowsEventLog.java    From localization_nifi with Apache License 2.0
/**
 * Cleanup
 */
@OnStopped
public void stop() {
    if (isSubscribed()) {
        wEvtApi.EvtClose(subscriptionHandle);
    }
    subscriptionHandle = null;
    evtSubscribeCallback = null;
    if (!renderedXMLs.isEmpty()) {
        if (sessionFactory != null) {
            getLogger().info("Finishing processing leftover events");
            ProcessSession session = sessionFactory.createSession();
            processQueue(session);
        } else {
            throw new ProcessException("Stopping the processor but there is no ProcessSessionFactory stored and there are messages in the internal queue. Removing the processor now will " +
                    "clear the queue but will result in DATA LOSS. This is normally due to starting the processor, receiving events and stopping before the onTrigger happens. The messages " +
                    "in the internal queue cannot finish processing until until the processor is triggered to run.");
        }
    }
    sessionFactory = null;
    provenanceUri = null;
    renderedXMLs = null;
}
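
This stop() method depends on a ProcessSessionFactory captured while the processor was running. As a sketch of the companion pattern (assumed from how session-factory processors are typically structured, not copied from this source file), onTrigger stores the factory so that events received asynchronously can still be flushed at shutdown:

private volatile ProcessSessionFactory sessionFactory;

@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
    this.sessionFactory = sessionFactory;
    // drain queued events, e.g. processQueue(sessionFactory.createSession());
}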
 
Example #4
Source File: ResultProcessorTest.java    From nifi with Apache License 2.0
@Test
public void testProcessResultFileFailure() {
    ProcessSession processSession = mock(ProcessSession.class);
    ComponentLog componentLog = mock(ComponentLog.class);
    FlowFile flowFile = mock(FlowFile.class);
    Exception exception = new Exception();
    String name = "name";

    when(processSession.putAttribute(eq(flowFile), anyString(), anyString())).thenReturn(flowFile);

    resultProcessor.process(processSession, componentLog, flowFile, exception, name);
    verify(processSession).putAttribute(flowFile, CoreAttributes.FILENAME.key(), name);
    verify(processSession).putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), MediaType.APPLICATION_XML_UTF_8.toString());
    verify(processSession).transfer(flowFile, failureRelationship);
    verify(componentLog).error(eq(ResultProcessor.UNABLE_TO_PROCESS_DUE_TO), any(Object[].class), eq(exception));
}
 
Example #5
Source File: TestAttributesToJSON.java    From localization_nifi with Apache License 2.0
@Test
public void testAttributes_emptyListUserSpecifiedAttributes() throws Exception {
    final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON());
    testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE);

    ProcessSession session = testRunner.getProcessSessionFactory().createSession();
    FlowFile ff = session.create();

    ff = session.putAttribute(ff, TEST_ATTRIBUTE_KEY, TEST_ATTRIBUTE_VALUE);

    testRunner.enqueue(ff);
    testRunner.run();

    testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0)
            .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);
    testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);

    String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS)
            .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME);

    ObjectMapper mapper = new ObjectMapper();
    Map<String, String> val = mapper.readValue(json, HashMap.class);
    assertEquals(TEST_ATTRIBUTE_VALUE, val.get(TEST_ATTRIBUTE_KEY));
}
 
Example #6
Source File: AbstractPutEventProcessor.java    From nifi with Apache License 2.0
/**
 * Helper method to acquire an available ChannelSender from the pool. If the pool is empty then a new sender is created.
 *
 * @param context
 *            - the current process context.
 *
 * @param session
 *            - the current process session.
 * @param flowFile
 *            - the FlowFile being processed in this session.
 *
 * @return ChannelSender - the sender that has been acquired or null if no sender is available and a new sender cannot be created.
 */
protected ChannelSender acquireSender(final ProcessContext context, final ProcessSession session, final FlowFile flowFile) {
    ChannelSender sender = senderPool.poll();
    if (sender == null) {
        try {
            getLogger().debug("No available connections, creating a new one...");
            sender = createSender(context);
        } catch (IOException e) {
            getLogger().error("No available connections, and unable to create a new one, transferring {} to failure",
                    new Object[]{flowFile}, e);
            session.transfer(flowFile, REL_FAILURE);
            session.commit();
            context.yield();
            sender = null;
        }
    }

    return sender;
}
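
Note the explicit session.commit() on the failure path: this helper routes the FlowFile to failure in the middle of processing, and committing immediately appears intended to make that transfer durable before the processor yields rather than leaving it to the end-of-onTrigger commit.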
 
Example #7
Source File: TestAttributesToJSON.java    From localization_nifi with Apache License 2.0
@Test
public void testAttribute_includeCoreAttributesContent() throws IOException {
    final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON());
    testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_CONTENT);
    testRunner.setProperty(AttributesToJSON.INCLUDE_CORE_ATTRIBUTES, "true");

    ProcessSession session = testRunner.getProcessSessionFactory().createSession();
    FlowFile ff = session.create();

    testRunner.enqueue(ff);
    testRunner.run();

    List<MockFlowFile> flowFilesForRelationship = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS);

    testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);
    testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);

    MockFlowFile flowFile = flowFilesForRelationship.get(0);

    assertEquals(AttributesToJSON.APPLICATION_JSON, flowFile.getAttribute(CoreAttributes.MIME_TYPE.key()));

    Map<String, String> val = new ObjectMapper().readValue(flowFile.toByteArray(), HashMap.class);
    assertEquals(3, val.size());
    Set<String> coreAttributes = Arrays.stream(CoreAttributes.values()).map(CoreAttributes::key).collect(Collectors.toSet());
    val.keySet().forEach(k -> assertTrue(coreAttributes.contains(k)));
}
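
The assertion of exactly three entries is not arbitrary: a freshly created FlowFile carries three core attributes out of the box (uuid, path, and filename), which is precisely what this test pins down.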
 
Example #8
Source File: TestSplitJson.java    From nifi with Apache License 2.0
@Test
public void testSplit_pathToNullValue() throws Exception {
    final TestRunner testRunner = TestRunners.newTestRunner(new SplitJson());
    testRunner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$.nullField");

    ProcessSession session = testRunner.getProcessSessionFactory().createSession();
    FlowFile ff = session.create();

    ff = session.write(ff, new OutputStreamCallback() {
        @Override
        public void process(OutputStream out) throws IOException {
            try (OutputStream outputStream = new BufferedOutputStream(out)) {
                outputStream.write("{\"stringField\": \"String Value\", \"nullField\": null}".getBytes(StandardCharsets.UTF_8));
            }
        }
    });

    testRunner.enqueue(ff);
    testRunner.run();

    testRunner.assertTransferCount(SplitJson.REL_FAILURE, 1);
}
 
Example #9
Source File: PutHiveStreaming.java    From nifi with Apache License 2.0
private void appendAvroRecords(ProcessSession session, byte[] avroHeader, DataFileWriter<GenericRecord> writer,
                               AtomicReference<FlowFile> flowFileRef, List<HiveStreamingRecord> hRecords) {

    flowFileRef.set(session.append(flowFileRef.get(), (out) -> {
        if (hRecords != null) {
            // Initialize the writer again as append mode, so that Avro header is written only once.
            writer.appendTo(new SeekableByteArrayInput(avroHeader), out);
            try {
                for (HiveStreamingRecord hRecord : hRecords) {
                    writer.append(hRecord.getRecord());
                }
            } catch (IOException ioe) {
                // The records were put to Hive Streaming successfully, but there was an error while writing the
                // Avro records to the flow file. Log as an error and move on.
                logger.error("Error writing Avro records (which were sent successfully to Hive Streaming) to the flow file, " + ioe, ioe);
            }
        }
        writer.close();
    }));
}
 
Example #10
Source File: AbstractHTMLProcessor.java    From localization_nifi with Apache License 2.0
/**
 * Parses the Jsoup HTML document from the FlowFile input content.
 *
 * @param inputFlowFile Input FlowFile containing the HTML
 * @param context ProcessContext
 * @param session ProcessSession
 *
 * @return Jsoup Document
 */
protected Document parseHTMLDocumentFromFlowfile(final FlowFile inputFlowFile, final ProcessContext context, final ProcessSession session) {
    final AtomicReference<Document> doc = new AtomicReference<>();
    session.read(inputFlowFile, new InputStreamCallback() {
        @Override
        public void process(InputStream inputStream) throws IOException {
            final String baseUrl = getBaseUrl(inputFlowFile, context);
            if (baseUrl == null || baseUrl.isEmpty()) {
                throw new RuntimeException("Base URL was empty.");
            }
            doc.set(Jsoup.parse(inputStream,
                    context.getProperty(HTML_CHARSET).getValue(),
                    baseUrl));
        }
    });
    return doc.get();
}
 
Example #11
Source File: GetTCP.java    From nifi with Apache License 2.0
@Override
public void handle(InetSocketAddress sourceAddress, byte[] message, boolean partialMessage) {
    ProcessSession session = this.sessionFactory.createSession();
    FlowFile flowFile = session.create();
    flowFile = session.write(flowFile, new OutputStreamCallback() {
        @Override
        public void process(OutputStream out) throws IOException {
            out.write(message);
        }
    });
    flowFile = session.putAttribute(flowFile, SOURCE_ENDPOINT_ATTRIBUTE, sourceAddress.toString());
    if (!GetTCP.this.dynamicAttributes.isEmpty()) {
        flowFile = session.putAllAttributes(flowFile, GetTCP.this.dynamicAttributes);
    }
    if (partialMessage) {
        session.transfer(flowFile, REL_PARTIAL);
    } else {
        session.transfer(flowFile, REL_SUCCESS);
    }
    session.commit();
}
 
Example #12
Source File: FetchGridFS.java    From nifi with Apache License 2.0
private String getQuery(ProcessSession session, ProcessContext context, FlowFile input) throws IOException {
    String queryString;
    if (context.getProperty(FILE_NAME).isSet()) {
        String fileName = context.getProperty(FILE_NAME).evaluateAttributeExpressions(input).getValue();
        queryString = String.format("{ \"filename\": \"%s\"}", fileName);
    } else if (context.getProperty(QUERY).isSet()) {
        queryString = context.getProperty(QUERY).evaluateAttributeExpressions(input).getValue();
    } else {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        session.exportTo(input, out);
        out.close();
        queryString = new String(out.toByteArray());
    }

    return queryString;
}
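
session.exportTo(input, out) copies the entire FlowFile content into the target stream, so this fallback loads the whole payload into memory to build the query string; the same pattern appears in Example #17 below. Note also that new String(byte[]) decodes with the platform default charset, which is worth keeping in mind for non-ASCII queries.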
 
Example #13
Source File: DistributeLoad.java    From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final DistributionStrategy strategy = strategyRef.get();
    final Set<Relationship> available = context.getAvailableRelationships();
    final int numRelationships = context.getProperty(NUM_RELATIONSHIPS).asInteger();
    final boolean allDestinationsAvailable = (available.size() == numRelationships);
    if (!allDestinationsAvailable && strategy.requiresAllDestinationsAvailable()) {
        // can't transfer the FlowFiles. Roll back and yield
        session.rollback();
        context.yield();
        return;
    }

    final Relationship relationship = strategy.mapToRelationship(context, flowFile);
    if (relationship == null) {
        // can't transfer the FlowFiles. Roll back and yield
        session.rollback();
        context.yield();
        return;
    }

    // add an attribute capturing which relationship a flowfile was routed through;
    // putAttribute returns a new FlowFile, which must be captured or the attribute is lost
    flowFile = session.putAttribute(flowFile, RELATIONSHIP_ATTRIBUTE, relationship.getName());

    session.transfer(flowFile, relationship);
    session.getProvenanceReporter().route(flowFile, relationship);
}
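
A reminder the compiler will not give you: ProcessSession mutators such as putAttribute return a new immutable FlowFile, and only the returned reference carries the change, so the result must be captured and handed to the subsequent transfer, as the reassignment above does.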
 
Example #14
Source File: GenerateFlowFile.java    From localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final byte[] data;
    if (context.getProperty(UNIQUE_FLOWFILES).asBoolean()) {
        data = generateData(context);
    } else if(context.getProperty(CUSTOM_TEXT).isSet()) {
        data = context.getProperty(CUSTOM_TEXT).evaluateAttributeExpressions().getValue().getBytes();
    } else {
        data = this.data.get();
    }

    Map<PropertyDescriptor, String> processorProperties = context.getProperties();
    Map<String, String> generatedAttributes = new HashMap<>();
    for (final Map.Entry<PropertyDescriptor, String> entry : processorProperties.entrySet()) {
        PropertyDescriptor property = entry.getKey();
        if (property.isDynamic() && property.isExpressionLanguageSupported()) {
            String dynamicValue = context.getProperty(property).evaluateAttributeExpressions().getValue();
            generatedAttributes.put(property.getName(), dynamicValue);
        }
    }

    for (int i = 0; i < context.getProperty(BATCH_SIZE).asInteger(); i++) {
        FlowFile flowFile = session.create();
        if (data.length > 0) {
            flowFile = session.write(flowFile, new OutputStreamCallback() {
                @Override
                public void process(final OutputStream out) throws IOException {
                    out.write(data);
                }
            });
        }
        flowFile = session.putAllAttributes(flowFile, generatedAttributes);

        session.getProvenanceReporter().create(flowFile);
        session.transfer(flowFile, SUCCESS);
    }
}
 
Example #15
Source File: GetTwitter.java    From localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final Event event = eventQueue.poll();
    if (event != null) {
        switch (event.getEventType()) {
            case STOPPED_BY_ERROR:
                getLogger().error("Received error {}: {} due to {}. Will not attempt to reconnect", new Object[]{event.getEventType(), event.getMessage(), event.getUnderlyingException()});
                break;
            case CONNECTION_ERROR:
            case HTTP_ERROR:
                getLogger().error("Received error {}: {}. Will attempt to reconnect", new Object[]{event.getEventType(), event.getMessage()});
                client.reconnect();
                break;
            default:
                break;
        }
    }

    final String tweet = messageQueue.poll();
    if (tweet == null) {
        context.yield();
        return;
    }

    FlowFile flowFile = session.create();
    flowFile = session.write(flowFile, new OutputStreamCallback() {
        @Override
        public void process(final OutputStream out) throws IOException {
            out.write(tweet.getBytes(StandardCharsets.UTF_8));
        }
    });

    final Map<String, String> attributes = new HashMap<>();
    attributes.put(CoreAttributes.MIME_TYPE.key(), "application/json");
    attributes.put(CoreAttributes.FILENAME.key(), flowFile.getAttribute(CoreAttributes.FILENAME.key()) + ".json");
    flowFile = session.putAllAttributes(flowFile, attributes);

    session.transfer(flowFile, REL_SUCCESS);
    session.getProvenanceReporter().receive(flowFile, Constants.STREAM_HOST + client.getEndpoint().getURI().toString());
}
 
Example #16
Source File: PublishAMQP.java    From nifi with Apache License 2.0
/**
 * Extracts contents of the {@link FlowFile} as byte array.
 */
private byte[] extractMessage(FlowFile flowFile, ProcessSession session){
    final byte[] messageContent = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, messageContent, true);
        }
    });
    return messageContent;
}
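
The callback copies the full content into a buffer sized from flowFile.getSize(), so the whole message is held in memory. As a sketch of an alternative (assuming a NiFi version where ProcessSession.read(FlowFile) returns an InputStream), the same extraction can be written with try-with-resources:

private byte[] extractMessage(final FlowFile flowFile, final ProcessSession session) throws IOException {
    final byte[] messageContent = new byte[(int) flowFile.getSize()];
    // read(FlowFile) hands back a stream over the FlowFile's content
    try (InputStream in = session.read(flowFile)) {
        StreamUtils.fillBuffer(in, messageContent, true);
    }
    return messageContent;
}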
 
Example #17
Source File: ElasticsearchRestProcessor.java    From nifi with Apache License 2.0
default String getQuery(FlowFile input, ProcessContext context, ProcessSession session) throws IOException {
    String retVal = null;
    if (context.getProperty(QUERY).isSet()) {
        retVal = context.getProperty(QUERY).evaluateAttributeExpressions(input).getValue();
    } else if (input != null) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        session.exportTo(input, out);
        out.close();

        retVal = new String(out.toByteArray());
    }

    return retVal;
}
 
Example #18
Source File: ConsumeKafka.java    From nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {

    lastTriggeredTimestamp = System.currentTimeMillis();

    final ConsumerPool pool = getConsumerPool(context);
    if (pool == null) {
        context.yield();
        return;
    }

    try (final ConsumerLease lease = pool.obtainConsumer(session)) {
        if (lease == null) {
            context.yield();
            return;
        }

        activeLeases.add(lease);
        try {
            while (this.isScheduled() && lease.continuePolling()) {
                lease.poll();
            }
            if (this.isScheduled() && !lease.commit()) {
                context.yield();
            }
        } catch (final WakeupException we) {
            getLogger().warn("Was interrupted while trying to communicate with Kafka with lease {}. "
                + "Will roll back session and discard any partially received data.", new Object[] {lease});
        } catch (final KafkaException kex) {
            getLogger().error("Exception while interacting with Kafka so will close the lease {} due to {}",
                new Object[] {lease, kex}, kex);
        } catch (final Throwable t) {
            getLogger().error("Exception while processing data from kafka so will close the lease {} due to {}",
                new Object[] {lease, t}, t);
        } finally {
            activeLeases.remove(lease);
        }
    }
}
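
The try-with-resources around pool.obtainConsumer(session) is doing real work here: closing the lease returns the underlying Kafka consumer to the pool, and, as the warning message indicates, an uncommitted lease is expected to roll back the session and discard partially received data.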
 
Example #19
Source File: PutCassandraQL.java    From nifi with Apache License 2.0
/**
 * Determines the CQL statement that should be executed for the given FlowFile
 *
 * @param session  the session that can be used to access the given FlowFile
 * @param flowFile the FlowFile whose CQL statement should be executed
 * @param charset  the character set used to decode the FlowFile's content
 * @return the CQL that is associated with the given FlowFile
 */
private String getCQL(final ProcessSession session, final FlowFile flowFile, final Charset charset) {
    // Read the CQL from the FlowFile's content
    final byte[] buffer = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, buffer);
        }
    });

    // Create the PreparedStatement string to use for this FlowFile.
    return new String(buffer, charset);
}
 
Example #20
Source File: MalformedChunkHandler.java    From nifi with Apache License 2.0
public void handle(FlowFile original, ProcessSession processSession, String chunkName, byte[] badChunk) {
    FlowFile flowFile = processSession.create(original);
    flowFile = processSession.putAttribute(flowFile, CoreAttributes.FILENAME.key(), chunkName);
    flowFile = processSession.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), MediaType.APPLICATION_BINARY.toString());
    flowFile = processSession.write(flowFile, out -> out.write(badChunk));
    processSession.transfer(flowFile, badChunkRelationship);
}
 
Example #21
Source File: PutSlackTest.java    From nifi with Apache License 2.0
@Test
public void testValidDynamicPropertiesWithExpressionLanguage() {
    ProcessSession session = testRunner.getProcessSessionFactory().createSession();
    FlowFile ff = session.create();
    Map<String, String> props = new HashMap<>();
    props.put("foo", "\"bar\"");
    props.put("ping", "pong");
    ff = session.putAllAttributes(ff, props);

    testRunner.setProperty(PutSlack.WEBHOOK_URL, server.getUrl());
    testRunner.setProperty(PutSlack.WEBHOOK_TEXT, WEBHOOK_TEST_TEXT);
    PropertyDescriptor dynamicProp = new PropertyDescriptor.Builder()
            .dynamic(true)
            .name("foo")
            .build();
    testRunner.setProperty(dynamicProp, "{\"foo\": ${foo}, \"ping\":\"${ping}\"}");

    testRunner.enqueue(ff);
    testRunner.run(1);
    testRunner.assertTransferCount(PutSlack.REL_SUCCESS, 1);
}
 
Example #22
Source File: AbstractListenEventProcessor.java    From nifi with Apache License 2.0
/**
 * If pollErrorQueue is true, the error queue will be checked first and event will be
 * returned from the error queue if available.
 *
 * If pollErrorQueue is false, or no data is in the error queue, the regular queue is polled.
 *
 * If longPoll is true, the regular queue will be polled with a short timeout, otherwise it will
 * poll with no timeout which will return immediately.
 *
 * @param longPoll whether or not to poll the main queue with a small timeout
 * @param pollErrorQueue whether or not to poll the error queue first
 *
 * @return an event from one of the queues, or null if none are available
 */
protected E getMessage(final boolean longPoll, final boolean pollErrorQueue, final ProcessSession session) {
    E event = null;
    if (pollErrorQueue) {
        event = errorEvents.poll();
    }

    if (event != null) {
        return event;
    }

    try {
        if (longPoll) {
            event = events.poll(getLongPollTimeout(), TimeUnit.MILLISECONDS);
        } else {
            event = events.poll();
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        return null;
    }

    if (event != null) {
        session.adjustCounter("Messages Received", 1L, false);
    }

    return event;
}
 
Example #23
Source File: TerminateAll.java    From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile;
    while ((flowFile = session.get()) != null) {
        session.remove(flowFile);
        session.adjustCounter("Removed", 1, false);
    }
}
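
Draining the queue in a loop lets a single onTrigger invocation remove every available FlowFile under one session, and the false flag to adjustCounter defers the counter update until the session commits instead of forcing an immediate adjustment.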
 
Example #24
Source File: ConsumerLease.java    From nifi with Apache License 2.0
private void handleParseFailure(final ConsumerRecord<byte[], byte[]> consumerRecord, final ProcessSession session, final Exception cause, final String message) {
    // If we are unable to parse the data, we need to transfer it to 'parse failure' relationship
    final Map<String, String> attributes = getAttributes(consumerRecord);
    attributes.put(KafkaProcessorUtils.KAFKA_OFFSET, String.valueOf(consumerRecord.offset()));
    attributes.put(KafkaProcessorUtils.KAFKA_TIMESTAMP, String.valueOf(consumerRecord.timestamp()));
    attributes.put(KafkaProcessorUtils.KAFKA_PARTITION, String.valueOf(consumerRecord.partition()));
    attributes.put(KafkaProcessorUtils.KAFKA_TOPIC, consumerRecord.topic());

    FlowFile failureFlowFile = session.create();

    final byte[] value = consumerRecord.value();
    if (value != null) {
        failureFlowFile = session.write(failureFlowFile, out -> out.write(value));
    }
    failureFlowFile = session.putAllAttributes(failureFlowFile, attributes);

    final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, consumerRecord.topic());
    session.getProvenanceReporter().receive(failureFlowFile, transitUri);

    session.transfer(failureFlowFile, REL_PARSE_FAILURE);

    if (cause == null) {
        logger.error(message);
    } else {
        logger.error(message, cause);
    }

    session.adjustCounter("Parse Failures", 1, false);
}
 
Example #25
Source File: GetIgniteCache.java    From nifi with Apache License 2.0
/**
 * Handle flow file and gets the entry from the cache based on the key attribute
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();

    if (flowFile == null) {
        return;
    }

    String key = context.getProperty(IGNITE_CACHE_ENTRY_KEY).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isEmpty(key)) {
        flowFile = session.putAttribute(flowFile, IGNITE_GET_FAILED_REASON_ATTRIBUTE_KEY, IGNITE_GET_FAILED_MISSING_KEY_MESSAGE);
        session.transfer(flowFile, REL_FAILURE);
    } else {
        try {
            byte[] value = getIgniteCache().get(key);
            if (value == null || value.length == 0) {
                flowFile = session.putAttribute(flowFile, IGNITE_GET_FAILED_REASON_ATTRIBUTE_KEY,
                    IGNITE_GET_FAILED_MISSING_ENTRY_MESSAGE);
                session.transfer(flowFile, REL_FAILURE);
            } else {
                ByteArrayInputStream bais = new ByteArrayInputStream(value);
                flowFile = session.importFrom(bais, flowFile);
                session.transfer(flowFile, REL_SUCCESS);
            }
        } catch (Exception exception) {
            flowFile = session.putAttribute(flowFile, IGNITE_GET_FAILED_REASON_ATTRIBUTE_KEY,
                 IGNITE_GET_FAILED_MESSAGE_PREFIX + exception);
            getLogger().error("Failed to get value for key {} from IgniteDB due to {}", new Object[] { key, exception }, exception);
            session.transfer(flowFile, REL_FAILURE);
            context.yield();
        }
    }
}
 
Example #26
Source File: ConsumeKafka_0_10.java    From localization_nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final ConsumerPool pool = getConsumerPool(context);
    if (pool == null) {
        context.yield();
        return;
    }

    try (final ConsumerLease lease = pool.obtainConsumer(session)) {
        if (lease == null) {
            context.yield();
            return;
        }

        activeLeases.add(lease);
        try {
            while (this.isScheduled() && lease.continuePolling()) {
                lease.poll();
            }
            if (this.isScheduled() && !lease.commit()) {
                context.yield();
            }
        } catch (final WakeupException we) {
            getLogger().warn("Was interrupted while trying to communicate with Kafka with lease {}. "
                + "Will roll back session and discard any partially received data.", new Object[] {lease});
        } catch (final KafkaException kex) {
            getLogger().error("Exception while interacting with Kafka so will close the lease {} due to {}",
                    new Object[]{lease, kex}, kex);
        } catch (final Throwable t) {
            getLogger().error("Exception while processing data from kafka so will close the lease {} due to {}",
                    new Object[]{lease, t}, t);
        } finally {
            activeLeases.remove(lease);
        }
    }
}
 
Example #27
Source File: ConsumerLease.java    From nifi with Apache License 2.0
private void writeDemarcatedData(final ProcessSession session, final List<ConsumerRecord<byte[], byte[]>> records, final TopicPartition topicPartition) {
    final ConsumerRecord<byte[], byte[]> firstRecord = records.get(0);
    final boolean demarcateFirstRecord;
    final BundleInformation bundleInfo = new BundleInformation(topicPartition, null);
    BundleTracker tracker = bundleMap.get(bundleInfo);
    FlowFile flowFile;
    if (tracker == null) {
        tracker = new BundleTracker(firstRecord, topicPartition, keyEncoding);
        flowFile = session.create();
        tracker.updateFlowFile(flowFile);
        demarcateFirstRecord = false; //have not yet written records for this topic/partition in this lease
    } else {
        demarcateFirstRecord = true; //have already been writing records for this topic/partition in this lease
    }
    flowFile = tracker.flowFile;

    tracker.incrementRecordCount(records.size());
    flowFile = session.append(flowFile, out -> {
        boolean useDemarcator = demarcateFirstRecord;
        for (final ConsumerRecord<byte[], byte[]> record : records) {
            if (useDemarcator) {
                out.write(demarcatorBytes);
            }
            final byte[] value = record.value();
            if (value != null) {
                out.write(value);
            }
            useDemarcator = true;
        }
    });
    tracker.updateFlowFile(flowFile);
    bundleMap.put(bundleInfo, tracker);
}
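
The bundleMap/BundleTracker bookkeeping is what allows one lease to append records from multiple polls into a single FlowFile per topic partition: the demarcator is written ahead of a batch only once the tracked FlowFile already contains records, so output never begins with a demarcator.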
 
Example #28
Source File: FlowFileTable.java    From nifi with Apache License 2.0
/**
 * Creates a FlowFile table.
 */
public FlowFileTable(final ProcessSession session, final FlowFile flowFile, final RecordSchema schema, final RecordReaderFactory recordReaderFactory, final ComponentLog logger) {
    this.session = session;
    this.flowFile = flowFile;
    this.recordSchema = schema;
    this.recordReaderFactory = recordReaderFactory;
    this.logger = logger;
}
 
Example #29
Source File: GenerateFlowFile.java    From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final int numFlowFiles = context.getProperty(BATCH_SIZE).asInteger();

    for (int i = 0; i < numFlowFiles; i++) {
        final FlowFile flowFile = createFlowFile(context, session);
        session.transfer(flowFile, REL_SUCCESS);
    }
}