Java Code Examples for org.apache.nifi.flowfile.FlowFile

The following examples show how to use org.apache.nifi.flowfile.FlowFile. These examples are extracted from open source projects; each one lists its source project, source file, and license.
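A pattern that recurs throughout these examples is that FlowFile objects are immutable: every ProcessSession method that modifies a flow file (putAttribute, putAllAttributes, write, and so on) returns a new FlowFile reference, which must be reassigned and used from then on. The minimal sketch below illustrates that pattern; the REL_SUCCESS relationship and the attribute name are hypothetical placeholders, not taken from any of the projects listed here.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    // Each modification returns a new FlowFile; always keep and reuse the returned reference.
    flowFile = session.putAttribute(flowFile, "example.attribute", "example-value");
    flowFile = session.write(flowFile, out -> out.write("example content".getBytes(StandardCharsets.UTF_8)));

    session.transfer(flowFile, REL_SUCCESS);
}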
Example 1
Source Project: localization_nifi   Source File: SFTPTransfer.java    License: Apache License 2.0
@Override
public InputStream getInputStream(final String remoteFileName, final FlowFile flowFile) throws IOException {
    final ChannelSftp sftp = getChannel(flowFile);
    try {
        return sftp.get(remoteFileName);
    } catch (final SftpException e) {
        switch (e.id) {
            case ChannelSftp.SSH_FX_NO_SUCH_FILE:
                throw new FileNotFoundException("Could not find file " + remoteFileName + " on remote SFTP Server");
            case ChannelSftp.SSH_FX_PERMISSION_DENIED:
                throw new PermissionDeniedException("Insufficient permissions to read file " + remoteFileName + " from remote SFTP Server", e);
            default:
                throw new IOException("Failed to obtain file content for " + remoteFileName, e);
        }
    }
}
 
Example 2
Source Project: nifi   Source File: TestAttributesToJSON.java    License: Apache License 2.0
@Test
public void testAttribute_singleUserDefinedAttribute() throws Exception {
    final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON());
    testRunner.setProperty(AttributesToJSON.ATTRIBUTES_LIST, TEST_ATTRIBUTE_KEY);
    testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE);

    ProcessSession session = testRunner.getProcessSessionFactory().createSession();
    FlowFile ff = session.create();
    ff = session.putAttribute(ff, TEST_ATTRIBUTE_KEY, TEST_ATTRIBUTE_VALUE);

    testRunner.enqueue(ff);
    testRunner.run();

    testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0)
            .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);
    testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);

    String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS)
            .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME);

    ObjectMapper mapper = new ObjectMapper();
    Map<String, String> val = mapper.readValue(json, HashMap.class);
    assertTrue(val.get(TEST_ATTRIBUTE_KEY).equals(TEST_ATTRIBUTE_VALUE));
    assertTrue(val.size() == 1);
}
 
Example 3
Source Project: nifi   Source File: MockProcessSession.java    License: Apache License 2.0
@Override
public MockFlowFile putAttribute(FlowFile flowFile, final String attrName, final String attrValue) {
    flowFile = validateState(flowFile);
    if (attrName == null || attrValue == null || flowFile == null) {
        throw new IllegalArgumentException("argument cannot be null");
    }
    if (!(flowFile instanceof MockFlowFile)) {
        throw new IllegalArgumentException("Cannot update attributes of a flow file that I did not create");
    }

    if ("uuid".equals(attrName)) {
        Assert.fail("Should not be attempting to set FlowFile UUID via putAttribute. This will be ignored in production");
    }

    final MockFlowFile mock = (MockFlowFile) flowFile;
    final MockFlowFile newFlowFile = new MockFlowFile(mock.getId(), flowFile);
    currentVersions.put(newFlowFile.getId(), newFlowFile);

    final Map<String, String> attrs = new HashMap<>();
    attrs.put(attrName, attrValue);
    newFlowFile.putAttributes(attrs);
    return newFlowFile;
}
 
Example 4
Source Project: nifi   Source File: StandardProcessSession.java    License: Apache License 2.0
@Override
public FlowFile putAllAttributes(FlowFile flowFile, final Map<String, String> attributes) {
    verifyTaskActive();

    flowFile = validateRecordState(flowFile);
    final StandardRepositoryRecord record = getRecord(flowFile);

    final Map<String, String> updatedAttributes;
    if (attributes.containsKey(CoreAttributes.UUID.key())) {
        updatedAttributes = new HashMap<>(attributes);
        updatedAttributes.remove(CoreAttributes.UUID.key());
    } else {
        updatedAttributes = attributes;
    }

    final StandardFlowFileRecord.Builder ffBuilder = new StandardFlowFileRecord.Builder().fromFlowFile(record.getCurrent()).addAttributes(updatedAttributes);
    final FlowFileRecord newFile = ffBuilder.build();

    record.setWorking(newFile, updatedAttributes);

    return newFile;
}
 
Example 5
Source Project: localization_nifi   Source File: PublishKafka.java    License: Apache License 2.0
private byte[] getMessageKey(final FlowFile flowFile, final ProcessContext context) {
    if (context.getProperty(MESSAGE_DEMARCATOR).isSet()) {
        return null;
    }

    final String uninterpretedKey;
    if (context.getProperty(KEY).isSet()) {
        uninterpretedKey = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    } else {
        uninterpretedKey = flowFile.getAttribute(KafkaProcessorUtils.KAFKA_KEY);
    }

    if (uninterpretedKey == null) {
        return null;
    }

    final String keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).getValue();
    if (UTF8_ENCODING.getValue().equals(keyEncoding)) {
        return uninterpretedKey.getBytes(StandardCharsets.UTF_8);
    }

    return DatatypeConverter.parseHexBinary(uninterpretedKey);
}
 
Example 6
Source Project: nifi   Source File: DistributeLoad.java    License: Apache License 2.0
@Override
public Relationship mapToRelationship(final ProcessContext context, final FlowFile flowFile) {
    final List<Relationship> relationshipList = DistributeLoad.this.weightedRelationshipListRef.get();
    final int numRelationships = relationshipList.size();

    // Round-robin through the weighted relationship list until an available relationship is found,
    // giving up (returning null) after one full pass over the list
    boolean foundFreeRelationship = false;
    Relationship relationship = null;

    int attempts = 0;
    while (!foundFreeRelationship) {
        final long counterValue = counter.getAndIncrement();
        final int idx = (int) (counterValue % numRelationships);
        relationship = relationshipList.get(idx);
        foundFreeRelationship = context.getAvailableRelationships().contains(relationship);
        if (++attempts % numRelationships == 0 && !foundFreeRelationship) {
            return null;
        }
    }

    return relationship;
}
 
Example 7
Source Project: nifi   Source File: FTPTransfer.java    License: Apache License 2.0
public void sendCommands(final List<String> commands, final FlowFile flowFile) throws IOException {
    if (commands.isEmpty()) {
        return;
    }

    final FTPClient client = getClient(flowFile);
    for (String cmd : commands) {
        if (!cmd.isEmpty()) {
            int result = client.sendCommand(cmd);
            logger.debug(this + " sent command to the FTP server: " + cmd + " for " + flowFile);

            if (FTPReply.isNegativePermanent(result) || FTPReply.isNegativeTransient(result)) {
                throw new IOException(this + " negative reply back from FTP server cmd: " + cmd + " reply:" + result + ": " + client.getReplyString() + " for " + flowFile);
            }
        }
    }
}
 
Example 8
Source Project: nifi   Source File: AbstractBaseKinesisProcessor.java    License: Apache License 2.0
protected List<FlowFile> filterMessagesByMaxSize(final ProcessSession session, final int batchSize, final long maxBufferSizeBytes, String message) {
    List<FlowFile> flowFiles = new ArrayList<FlowFile>(batchSize);

    long currentBufferSizeBytes = 0;

    for (int i = 0; (i < batchSize) && (currentBufferSizeBytes <= maxBufferSizeBytes); i++) {

        FlowFile flowFileCandidate = session.get();
        if (flowFileCandidate == null) {
            break;
        }

        if (flowFileCandidate.getSize() > MAX_MESSAGE_SIZE) {
            flowFileCandidate = handleFlowFileTooBig(session, flowFileCandidate, message);
            continue;
        }

        currentBufferSizeBytes += flowFileCandidate.getSize();

        flowFiles.add(flowFileCandidate);
    }
    return flowFiles;
}
 
Example 9
Source Project: nifi   Source File: StandardProvenanceReporter.java    License: Apache License 2.0
@Override
public void join(final Collection<FlowFile> parents, final FlowFile child, final String details, final long joinDuration) {
    verifyFlowFileKnown(child);

    try {
        final ProvenanceEventBuilder eventBuilder = build(child, ProvenanceEventType.JOIN);
        eventBuilder.addChildFlowFile(child);
        eventBuilder.setDetails(details);

        for (final FlowFile parent : parents) {
            eventBuilder.addParentFlowFile(parent);
        }

        events.add(eventBuilder.build());
    } catch (final Exception e) {
        logger.error("Failed to generate Provenance Event due to " + e);
        if (logger.isDebugEnabled()) {
            logger.error("", e);
        }
    }
}
 
Example 10
Source Project: nifi   Source File: DeleteRowsWriter.java    License: Apache License 2.0
/**
 * Creates and transfers a new flow file for each row of the specified event, whose contents are the JSON-serialized value of the row, with the sequence ID attribute set
 *
 * @param session   A reference to a ProcessSession from which the flow file(s) will be created and transferred
 * @param eventInfo An event whose value will become the contents of the flow file
 * @return The next available CDC sequence ID for use by the CDC processor
 */
@Override
public long writeEvent(final ProcessSession session, String transitUri, final DeleteRowsEventInfo eventInfo, final long currentSequenceId, Relationship relationship) {
    final AtomicLong seqId = new AtomicLong(currentSequenceId);
    for (Serializable[] row : eventInfo.getRows()) {

        FlowFile flowFile = session.create();
        flowFile = session.write(flowFile, outputStream -> {

            super.startJson(outputStream, eventInfo);
            super.writeJson(eventInfo);

            final BitSet bitSet = eventInfo.getIncludedColumns();
            writeRow(eventInfo, row, bitSet);

            super.endJson();
        });

        flowFile = session.putAllAttributes(flowFile, getCommonAttributes(seqId.get(), eventInfo));
        session.transfer(flowFile, relationship);
        session.getProvenanceReporter().receive(flowFile, transitUri);
        seqId.getAndIncrement();
    }
    return seqId.get();
}
 
Example 11
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    ComponentLog logger = getLogger();
    FlowFile flowFile = session.get();

    // Create a new flow file if there was none to pull from an incoming connection
    if (flowFile == null) {
        flowFile = session.create();
    }

    // Get the data
    String data = generateData(context.getProperty(PRINT_HEADER).asBoolean(), context.getProperty(LONG_TIMESTAMP).asBoolean(), context.getProperty(TIMEZONE).toString(), context.getProperty(DATA_FORMAT).getValue(),
            context.getProperty(JSON_DEVICE_TYPE).getValue(), context.getProperty(DEVICE_NAME).getValue());

    // Write the results back out to flow file
    try {
        flowFile = session.write(flowFile, out -> out.write(data.getBytes()));
        session.getProvenanceReporter().create(flowFile);
        session.transfer(flowFile, SUCCESS);
    } catch (ProcessException ex) {
        logger.error("Unable to write generated data out to flowfile. Error: ", ex);
    }
}
 
Example 12
Source Project: localization_nifi   Source File: MockProcessSession.java    License: Apache License 2.0
@Override
public MockFlowFile merge(final Collection<FlowFile> sources, final FlowFile destination) {
    for (final FlowFile source : sources) {
        validateState(source);
    }
    validateState(destination);
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    for (final FlowFile flowFile : sources) {
        final MockFlowFile mock = (MockFlowFile) flowFile;
        final byte[] data = mock.getData();
        try {
            baos.write(data);
        } catch (final IOException e) {
            throw new AssertionError("Failed to write to BAOS");
        }
    }

    final MockFlowFile newFlowFile = new MockFlowFile(destination.getId(), destination);
    newFlowFile.setData(baos.toByteArray());
    currentVersions.put(newFlowFile.getId(), newFlowFile);

    return newFlowFile;
}
 
Example 13
Source Project: nifi   Source File: ParseNetflowv5.java    License: Apache License 2.0
private void generateKV(final List<FlowFile> multipleRecords, final ProcessSession session, final FlowFile flowFile, final Map<String, String> attributes, final Netflowv5Parser parser,
        final int processedRecord) {
    int numberOfRecords = processedRecord;
    generateHeaderAttributes(attributes, parser);

    final String[] fieldname = getRecordFields();
    int record = 0;
    FlowFile recordFlowFile = flowFile;
    while (numberOfRecords-- > 0) {
        // Process KVs of the Flow Record fields
        final Object[] fieldvalue = parser.getRecordData()[record++];
        for (int i = 0; i < fieldname.length; i++) {
            attributes.put("netflowv5.record." + fieldname[i], String.valueOf(fieldvalue[i]));
        }
        recordFlowFile = session.create(flowFile);
        recordFlowFile = session.putAllAttributes(recordFlowFile, attributes);
        multipleRecords.add(recordFlowFile);
    }
}
 
Example 14
Source Project: nifi   Source File: TestPublishKafkaRecord_1_0.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
@Test
public void testMultipleFailures() throws IOException {
    final Set<FlowFile> flowFiles = new HashSet<>();
    flowFiles.add(runner.enqueue("John Doe, 48"));
    flowFiles.add(runner.enqueue("John Doe, 48"));
    flowFiles.add(runner.enqueue("John Doe, 48"));

    when(mockLease.complete()).thenReturn(createFailurePublishResult(flowFiles));

    runner.run();
    runner.assertAllFlowFilesTransferred(PublishKafkaRecord_1_0.REL_FAILURE, 3);

    verify(mockLease, times(3)).publish(any(FlowFile.class), any(RecordSet.class), any(RecordSetWriterFactory.class),
            AdditionalMatchers.or(any(RecordSchema.class), isNull()), eq(null), eq(TOPIC_NAME), nullable(Function.class));
    verify(mockLease, times(1)).complete();
    verify(mockLease, times(1)).close();
}
 
Example 15
Source Project: nifi   Source File: StandardProcessSession.java    License: Apache License 2.0
private void decrementReadCount(final FlowFile flowFile) {
    final Integer count = readRecursionSet.get(flowFile);
    if (count == null) {
        return;
    }

    final int updatedCount = count - 1;
    if (updatedCount == 0) {
        readRecursionSet.remove(flowFile);
    } else {
        readRecursionSet.put(flowFile, updatedCount);
    }
}
 
Example 16
Source Project: localization_nifi   Source File: MergeContent.java    License: Apache License 2.0
@Override
public Map<String, String> getMergedAttributes(final List<FlowFile> flowFiles) {
    final Map<String, String> result = new HashMap<>();

    //trivial cases
    if (flowFiles == null || flowFiles.isEmpty()) {
        return result;
    } else if (flowFiles.size() == 1) {
        result.putAll(flowFiles.iterator().next().getAttributes());
    }

    /*
     * Start with the first attribute map and only put an entry to the
     * resultant map if it is common to every map.
     */
    final Map<String, String> firstMap = flowFiles.iterator().next().getAttributes();

    outer:
    for (final Map.Entry<String, String> mapEntry : firstMap.entrySet()) {
        final String key = mapEntry.getKey();
        final String value = mapEntry.getValue();

        for (final FlowFile flowFile : flowFiles) {
            final Map<String, String> currMap = flowFile.getAttributes();
            final String curVal = currMap.get(key);
            if (curVal == null || !curVal.equals(value)) {
                continue outer;
            }
        }
        result.put(key, value);
    }

    // Never copy the UUID from the parents - which could happen if we don't remove it and there is only 1 parent.
    result.remove(CoreAttributes.UUID.key());
    return result;
}
 
Example 17
Source Project: nifi   Source File: TerminateFlowFile.java    License: Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    session.remove(flowFile);
}
 
Example 18
Source Project: nifi   Source File: StandardProvenanceReporter.java    License: Apache License 2.0
@Override
public void fork(final FlowFile parent, final Collection<FlowFile> children, final String details, final long forkDuration) {
    verifyFlowFileKnown(parent);

    try {
        final ProvenanceEventBuilder eventBuilder = build(parent, ProvenanceEventType.FORK);
        eventBuilder.addParentFlowFile(parent);
        for (final FlowFile child : children) {
            eventBuilder.addChildFlowFile(child);
        }

        if (forkDuration > -1L) {
            eventBuilder.setEventDuration(forkDuration);
        }

        if (details != null) {
            eventBuilder.setDetails(details);
        }

        events.add(eventBuilder.build());
    } catch (final Exception e) {
        logger.error("Failed to generate Provenance Event due to " + e);
        if (logger.isDebugEnabled()) {
            logger.error("", e);
        }
    }
}
 
Example 19
Source Project: nifi   Source File: ConsumeMQTT.java    License: Apache License 2.0
private void transferQueue(ProcessSession session) {
    while (!mqttQueue.isEmpty()) {
        FlowFile messageFlowfile = session.create();
        final MQTTQueueMessage mqttMessage = mqttQueue.peek();

        Map<String, String> attrs = new HashMap<>();
        attrs.put(BROKER_ATTRIBUTE_KEY, broker);
        attrs.put(TOPIC_ATTRIBUTE_KEY, mqttMessage.getTopic());
        attrs.put(QOS_ATTRIBUTE_KEY, String.valueOf(mqttMessage.getQos()));
        attrs.put(IS_DUPLICATE_ATTRIBUTE_KEY, String.valueOf(mqttMessage.isDuplicate()));
        attrs.put(IS_RETAINED_ATTRIBUTE_KEY, String.valueOf(mqttMessage.isRetained()));

        messageFlowfile = session.putAllAttributes(messageFlowfile, attrs);

        messageFlowfile = session.write(messageFlowfile, new OutputStreamCallback() {
            @Override
            public void process(final OutputStream out) throws IOException {
                out.write(mqttMessage.getPayload());
            }
        });

        String transitUri = new StringBuilder(broker).append(mqttMessage.getTopic()).toString();
        session.getProvenanceReporter().receive(messageFlowfile, transitUri);
        session.transfer(messageFlowfile, REL_MESSAGE);
        session.commit();
        if (!mqttQueue.remove(mqttMessage) && logger.isWarnEnabled()) {
            logger.warn(new StringBuilder("FlowFile ")
                    .append(messageFlowfile.getAttribute(CoreAttributes.UUID.key()))
                    .append(" for Mqtt message ")
                    .append(mqttMessage)
                    .append(" had already been removed from queue, possible duplication of flow files")
                    .toString());
        }
    }
}
 
Example 20
Source Project: nifi   Source File: UpdateAttributeNoEL.java    License: Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    for (final String propertyName : context.getAllProperties().keySet()) {
        final String value = context.getProperty(propertyName).getValue();
        flowFile = session.putAttribute(flowFile, propertyName, value);
    }

    session.transfer(flowFile, REL_SUCCESS);
}
 
Example 21
Source Project: nifi   Source File: PutKudu.java    License: Apache License 2.0
private String getEvaluatedProperty(PropertyDescriptor property, ProcessContext context, FlowFile flowFile) {
    PropertyValue evaluatedProperty = context.getProperty(property).evaluateAttributeExpressions(flowFile);
    if (property.isRequired() && evaluatedProperty == null) {
        throw new ProcessException(String.format("Property `%s` is required but evaluated to null", property.getDisplayName()));
    }
    return evaluatedProperty.getValue();
}
 
Example 22
Source Project: nifi   Source File: StatelessProcessSession.java    License: Apache License 2.0
@Override
public StatelessFlowFile putAllAttributes(FlowFile flowFile, final Map<String, String> attrs) {
    flowFile = validateState(flowFile);
    if (attrs == null || flowFile == null) {
        throw new IllegalArgumentException("argument cannot be null");
    }
    if (!(flowFile instanceof StatelessFlowFile)) {
        throw new IllegalArgumentException("Cannot update attributes of a flow file that I did not create");
    }
    final StatelessFlowFile newFlowFile = new StatelessFlowFile((StatelessFlowFile) flowFile, this.materializeContent);
    currentVersions.put(newFlowFile.getId(), newFlowFile);

    newFlowFile.putAttributes(attrs);
    return newFlowFile;
}
 
Example 23
Source Project: localization_nifi   Source File: MockProvenanceReporter.java    License: Apache License 2.0
@Override
public void fork(final FlowFile parent, final Collection<FlowFile> children, final String details, final long forkDuration) {
    verifyFlowFileKnown(parent);

    try {
        final ProvenanceEventBuilder eventBuilder = build(parent, ProvenanceEventType.FORK);
        eventBuilder.addParentFlowFile(parent);
        for (final FlowFile child : children) {
            eventBuilder.addChildFlowFile(child);
        }

        if (forkDuration > -1L) {
            eventBuilder.setEventDuration(forkDuration);
        }

        if (details != null) {
            eventBuilder.setDetails(details);
        }

        events.add(eventBuilder.build());
    } catch (final Exception e) {
        logger.error("Failed to generate Provenance Event due to " + e);
        if (logger.isDebugEnabled()) {
            logger.error("", e);
        }
    }
}
 
Example 24
Source Project: nifi   Source File: InFlightMessageTracker.java    License: Apache License 2.0
private boolean isComplete(final FlowFile flowFile) {
    final Counts counts = messageCountsByFlowFile.get(flowFile);
    if (counts.getAcknowledgedCount() == counts.getSentCount()) {
        // all messages received successfully.
        return true;
    }

    if (failures.containsKey(flowFile)) {
        // FlowFile failed so is complete
        return true;
    }

    return false;
}
 
Example 25
@Test
@Ignore
public void testManyFilesOpened() throws IOException {

    StandardProcessSession[] standardProcessSessions = new StandardProcessSession[100000];
    for (int i = 0; i < 70000; i++) {
        standardProcessSessions[i] = new StandardProcessSession(context);

        FlowFile flowFile = standardProcessSessions[i].create();
        final byte[] buff = new byte["Hello".getBytes().length];

        flowFile = standardProcessSessions[i].append(flowFile, new OutputStreamCallback() {
            @Override
            public void process(OutputStream out) throws IOException {
                out.write("Hello".getBytes());
            }
        });

        try {
            standardProcessSessions[i].read(flowFile, false, new InputStreamCallback() {
                @Override
                public void process(final InputStream in) throws IOException {
                    StreamUtils.fillBuffer(in, buff);
                }
            });
        } catch (Exception e) {
            System.out.println("Failed at file:" + i);
            throw e;
        }
        if (i % 1000 == 0) {
            System.out.println("i:" + i);
        }
    }
}
 
Example 26
Source Project: nifi   Source File: MockProcessSession.java    License: Apache License 2.0
@Override
public List<FlowFile> get(final int maxResults) {
    final List<FlowFile> flowFiles = new ArrayList<>(Math.min(500, maxResults));
    for (int i = 0; i < maxResults; i++) {
        final MockFlowFile nextFlowFile = get();
        if (nextFlowFile == null) {
            return flowFiles;
        }

        flowFiles.add(nextFlowFile);
    }

    return flowFiles;
}
 
Example 27
Source Project: nifi   Source File: GetKafka.java    License: Apache License 2.0
/**
 * Releases the flow file. In the context of this operation, releasing means:
 * if the flow file is empty, it is removed from the session and the method returns;
 * otherwise the attributes are applied and it is transferred to REL_SUCCESS.
 */
private void releaseFlowFile(FlowFile flowFile, ProcessSession session, Map<String, String> attributes, long start, String topic, int msgCount){
    if (flowFile.getSize() == 0L) {
        session.remove(flowFile);
    } else {
        flowFile = session.putAllAttributes(flowFile, attributes);
        final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
        session.getProvenanceReporter().receive(flowFile, "kafka://" + topic, "Received " + msgCount + " Kafka messages", millis);
        getLogger().info("Successfully received {} from Kafka with {} messages in {} millis", new Object[]{flowFile, msgCount, millis});
        session.transfer(flowFile, REL_SUCCESS);
    }
}
 
Example 28
Source Project: nifi   Source File: ExecuteInfluxDBQuery.java    License: Apache License 2.0
protected FlowFile populateErrorAttributes(final ProcessSession session, FlowFile flowFile, String query,
        String message) {
    Map<String,String> attributes = new HashMap<>();
    attributes.put(INFLUX_DB_ERROR_MESSAGE, String.valueOf(message));
    attributes.put(INFLUX_DB_EXECUTED_QUERY, String.valueOf(query));
    flowFile = session.putAllAttributes(flowFile, attributes);
    return flowFile;
}
 
Example 29
Source Project: nifi   Source File: StatelessFlowFile.java    License: Apache License 2.0
@Override
public boolean equals(final Object obj) {
    if (obj == null) {
        return false;
    }
    if (obj == this) {
        return true;
    }
    if (obj instanceof FlowFile) {
        return ((FlowFile) obj).getId() == this.id;
    }
    return false;
}
 
Example 30
@Override
public String evaluate(final FlowFile flowFile, final AttributeValueDecorator decorator) throws ProcessException {
    final ValueLookup lookup = new ValueLookup(variableRegistry, flowFile);
    final Object evaluationResult = query.evaluate(lookup).getValue();
    if (evaluationResult == null) {
        return "";
    }

    String result = evaluationResult.toString();
    if (decorator != null) {
        result = decorator.decorate(result);
    }
    return Query.unescape(result);
}