Java Code Examples for org.apache.nifi.processor.ProcessSession#putAttribute()

The following examples show how to use org.apache.nifi.processor.ProcessSession#putAttribute(). They are drawn from open source projects; the source file, originating project, and license are noted above each example.
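ProcessSession#putAttribute(FlowFile, String, String) does not modify a FlowFile in place: FlowFile objects are immutable, so the call returns a new FlowFile reference carrying the added or updated attribute, and that returned reference is the one that must be used for subsequent session calls such as transfer(). The minimal onTrigger() sketch below illustrates the pattern; the attribute name, value, and REL_SUCCESS relationship are placeholders for illustration, not part of the API.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    // putAttribute returns a new FlowFile reference; keep it and reuse it for later calls
    flowFile = session.putAttribute(flowFile, "example.attribute", "example-value");
    session.transfer(flowFile, REL_SUCCESS);
}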
Example 1
Source File: TestAttributesToJSON.java    From nifi with Apache License 2.0
@Test
public void testAttribute_singleUserDefinedAttribute() throws Exception {
    final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON());
    testRunner.setProperty(AttributesToJSON.ATTRIBUTES_LIST, TEST_ATTRIBUTE_KEY);
    testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE);

    ProcessSession session = testRunner.getProcessSessionFactory().createSession();
    FlowFile ff = session.create();
    ff = session.putAttribute(ff, TEST_ATTRIBUTE_KEY, TEST_ATTRIBUTE_VALUE);

    testRunner.enqueue(ff);
    testRunner.run();

    testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0)
            .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);
    testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);

    String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS)
            .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME);

    ObjectMapper mapper = new ObjectMapper();
    Map<String, String> val = mapper.readValue(json, HashMap.class);
    assertEquals(TEST_ATTRIBUTE_VALUE, val.get(TEST_ATTRIBUTE_KEY));
    assertEquals(1, val.size());
}
 
Example 2
Source File: TestAttributesToJSON.java    From nifi with Apache License 2.0
@Test
public void testAttribute_noIncludeCoreAttributesContent() throws IOException {
    final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON());
    testRunner.setProperty(AttributesToJSON.INCLUDE_CORE_ATTRIBUTES, "false");
    testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_CONTENT);

    ProcessSession session = testRunner.getProcessSessionFactory().createSession();
    FlowFile ff = session.create();
    ff = session.putAttribute(ff, TEST_ATTRIBUTE_KEY, TEST_ATTRIBUTE_VALUE);
    ff = session.putAttribute(ff, CoreAttributes.PATH.key(), TEST_ATTRIBUTE_VALUE);

    testRunner.enqueue(ff);
    testRunner.run();

    testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);
    testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);

    ObjectMapper mapper = new ObjectMapper();
    Map<String, String> val = mapper.readValue(testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0).toByteArray(), HashMap.class);
    assertEquals(TEST_ATTRIBUTE_VALUE, val.get(TEST_ATTRIBUTE_KEY));
    assertEquals(1, val.size());
}
 
Example 3
Source File: HashAttribute.java    From localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final Map<String, Pattern> patterns = regexMapRef.get();
    final ComponentLog logger = getLogger();

    final SortedMap<String, String> attributes = getRelevantAttributes(flowFile, patterns);
    if (attributes.size() != patterns.size()) {
        final Set<String> wantedKeys = patterns.keySet();
        final Set<String> foundKeys = attributes.keySet();
        final StringBuilder missingKeys = new StringBuilder();
        for (final String wantedKey : wantedKeys) {
            if (!foundKeys.contains(wantedKey)) {
                missingKeys.append(wantedKey).append(" ");
            }
        }

        logger.error("routing {} to 'failure' because of missing attributes: {}", new Object[]{flowFile, missingKeys.toString()});
        session.transfer(flowFile, REL_FAILURE);
    } else {
        // create single string of attribute key/value pairs to use for group ID hash
        final StringBuilder hashableValue = new StringBuilder();
        for (final Map.Entry<String, String> entry : attributes.entrySet()) {
            hashableValue.append(entry.getKey());
            if (StringUtils.isBlank(entry.getValue())) {
                hashableValue.append("EMPTY");
            } else {
                hashableValue.append(entry.getValue());
            }
        }

        // create group ID
        final String hashValue = DigestUtils.md5Hex(hashableValue.toString());

        logger.info("adding Hash Value {} to attributes for {} and routing to success", new Object[]{hashValue, flowFile});
        flowFile = session.putAttribute(flowFile, context.getProperty(HASH_VALUE_ATTRIBUTE).getValue(), hashValue);
        session.getProvenanceReporter().modifyAttributes(flowFile);
        session.transfer(flowFile, REL_SUCCESS);
    }
}
 
Example 4
Source File: AbstractDynamoDBProcessor.java    From nifi with Apache License 2.0
/**
 * Sends unprocessed items to the unprocessed relationship and removes the corresponding flow file from the key-to-flow-file map.
 * @param session used for transferring the flow file
 * @param keysToFlowFileMap map of ItemKeys to flow files
 * @param hashKeyValue the item's hash key value
 * @param rangeKeyValue the item's range key value
 */
protected void sendUnprocessedToUnprocessedRelationship(final ProcessSession session, Map<ItemKeys, FlowFile> keysToFlowFileMap, Object hashKeyValue, Object rangeKeyValue) {
    ItemKeys itemKeys = new ItemKeys(hashKeyValue, rangeKeyValue);

    FlowFile flowFile = keysToFlowFileMap.get(itemKeys);
    flowFile = session.putAttribute(flowFile, DYNAMODB_KEY_ERROR_UNPROCESSED, itemKeys.toString());
    session.transfer(flowFile, REL_UNPROCESSED);

    getLogger().error("Unprocessed key " + itemKeys + " for flow file " + flowFile);

    keysToFlowFileMap.remove(itemKeys);
}
 
Example 5
Source File: TestAttributesToJSON.java    From nifi with Apache License 2.0
@Test
public void testAttribute_singleNonExistingUserDefinedAttribute() throws Exception {
    final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON());
    testRunner.setProperty(AttributesToJSON.ATTRIBUTES_LIST, "NonExistingAttribute");
    testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE);

    ProcessSession session = testRunner.getProcessSessionFactory().createSession();
    FlowFile ff = session.create();
    ff = session.putAttribute(ff, TEST_ATTRIBUTE_KEY, TEST_ATTRIBUTE_VALUE);

    testRunner.enqueue(ff);
    testRunner.run();

    testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0)
            .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);
    testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);

    String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS)
            .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME);

    ObjectMapper mapper = new ObjectMapper();
    Map<String, String> val = mapper.readValue(json, HashMap.class);

    // If an attribute is requested but does not exist, it is placed in the JSON with an empty string value
    assertEquals("", val.get("NonExistingAttribute"));
    assertEquals(1, val.size());
}
 
Example 6
Source File: AbstractDynamoDBProcessor.java    From localization_nifi with Apache License 2.0
protected List<FlowFile> processException(final ProcessSession session, List<FlowFile> flowFiles, Exception exception) {
    List<FlowFile> failedFlowFiles = new ArrayList<>();
    for (FlowFile flowFile : flowFiles) {
        flowFile = session.putAttribute(flowFile, DYNAMODB_ERROR_EXCEPTION_MESSAGE, exception.getMessage());
        failedFlowFiles.add(flowFile);
    }
    return failedFlowFiles;
}
 
Example 7
Source File: TestStandardProcessorTestRunner.java    From localization_nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile ff = session.create();
    if (counter % 2 == 0) {
        ff = session.putAttribute(ff, KEY, "value");
        session.transfer(ff, REL_SUCCESS);
    } else {
        session.transfer(ff, REL_FAILURE);
    }
    counter++;
}
 
Example 8
Source File: AttributesToJSON.java    From localization_nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final FlowFile original = session.get();
    if (original == null) {
        return;
    }

    final Map<String, String> atrList = buildAttributesMapForFlowFile(original, attributes, attributesToRemove, nullValueForEmptyString);

    try {
        if (destinationContent) {
            FlowFile conFlowfile = session.write(original, (in, out) -> {
                try (OutputStream outputStream = new BufferedOutputStream(out)) {
                    outputStream.write(objectMapper.writeValueAsBytes(atrList));
                }
            });
            conFlowfile = session.putAttribute(conFlowfile, CoreAttributes.MIME_TYPE.key(), APPLICATION_JSON);
            session.transfer(conFlowfile, REL_SUCCESS);
        } else {
            FlowFile atFlowfile = session.putAttribute(original, JSON_ATTRIBUTE_NAME, objectMapper.writeValueAsString(atrList));
            session.transfer(atFlowfile, REL_SUCCESS);
        }
    } catch (JsonProcessingException e) {
        getLogger().error(e.getMessage());
        session.transfer(original, REL_FAILURE);
    }
}
 
Example 9
Source File: AttributesToJSON.java    From nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final FlowFile original = session.get();
    if (original == null) {
        return;
    }

    final Map<String, String> atrList = buildAttributesMapForFlowFile(original, attributes, attributesToRemove, nullValueForEmptyString, pattern);

    try {
        if (destinationContent) {
            FlowFile conFlowfile = session.write(original, (in, out) -> {
                try (OutputStream outputStream = new BufferedOutputStream(out)) {
                    outputStream.write(objectMapper.writeValueAsBytes(atrList));
                }
            });
            conFlowfile = session.putAttribute(conFlowfile, CoreAttributes.MIME_TYPE.key(), APPLICATION_JSON);
            session.transfer(conFlowfile, REL_SUCCESS);
        } else {
            FlowFile atFlowfile = session.putAttribute(original, JSON_ATTRIBUTE_NAME, objectMapper.writeValueAsString(atrList));
            session.transfer(atFlowfile, REL_SUCCESS);
        }
    } catch (JsonProcessingException e) {
        getLogger().error(e.getMessage());
        session.transfer(original, REL_FAILURE);
    }
}
 
Example 10
Source File: FetchElasticsearch5.java    From localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    synchronized (esClient) {
        if(esClient.get() == null) {
            super.setup(context);
        }
    }

    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
    final String docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
    final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
    final Charset charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(flowFile).getValue());

    final ComponentLog logger = getLogger();
    try {

        logger.debug("Fetching {}/{}/{} from Elasticsearch", new Object[]{index, docType, docId});
        GetRequestBuilder getRequestBuilder = esClient.get().prepareGet(index, docType, docId);
        final GetResponse getResponse = getRequestBuilder.execute().actionGet();

        if (getResponse == null || !getResponse.isExists()) {
            logger.warn("Failed to read {}/{}/{} from Elasticsearch: Document not found",
                    new Object[]{index, docType, docId});

            // We couldn't find the document, so penalize it and send it to "not found"
            flowFile = session.penalize(flowFile);
            session.transfer(flowFile, REL_NOT_FOUND);
        } else {
            flowFile = session.putAllAttributes(flowFile, new HashMap<String, String>() {{
                put("filename", docId);
                put("es.index", index);
                put("es.type", docType);
            }});
            flowFile = session.write(flowFile, new OutputStreamCallback() {
                @Override
                public void process(OutputStream out) throws IOException {
                    out.write(getResponse.getSourceAsString().getBytes(charset));
                }
            });
            logger.debug("Elasticsearch document " + docId + " fetched, routing to success");
            // The document is JSON, so update the MIME type of the flow file
            flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/json");
            session.getProvenanceReporter().fetch(flowFile, getResponse.remoteAddress().getAddress());
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (NoNodeAvailableException
            | ElasticsearchTimeoutException
            | ReceiveTimeoutTransportException
            | NodeClosedException exceptionToRetry) {
        logger.error("Failed to read into Elasticsearch due to {}, this may indicate an error in configuration "
                        + "(hosts, username/password, etc.), or this issue may be transient. Routing to retry",
                new Object[]{exceptionToRetry.getLocalizedMessage()}, exceptionToRetry);
        session.transfer(flowFile, REL_RETRY);
        context.yield();

    } catch (Exception e) {
        logger.error("Failed to read {} from Elasticsearch due to {}", new Object[]{flowFile, e.getLocalizedMessage()}, e);
        session.transfer(flowFile, REL_FAILURE);
        context.yield();
    }
}
 
Example 11
Source File: GetHTMLElement.java    From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if ( flowFile == null ) {
        return;
    }

    final Document doc;
    final Elements eles;

    try {
        doc = parseHTMLDocumentFromFlowfile(flowFile, context, session);
        eles = doc.select(context.getProperty(CSS_SELECTOR).evaluateAttributeExpressions(flowFile).getValue());
    } catch (final Exception ex) {
        getLogger().error("Failed to extract HTML from {} due to {}; routing to {}", new Object[] {flowFile, ex, REL_INVALID_HTML}, ex);
        session.transfer(flowFile, REL_INVALID_HTML);
        return;
    }

    final String prependValue = context.getProperty(PREPEND_ELEMENT_VALUE).evaluateAttributeExpressions(flowFile).getValue();
    final String appendValue = context.getProperty(APPEND_ELEMENT_VALUE).evaluateAttributeExpressions(flowFile).getValue();
    final String outputType = context.getProperty(OUTPUT_TYPE).getValue();
    final String attributeKey = context.getProperty(ATTRIBUTE_KEY).evaluateAttributeExpressions(flowFile).getValue();

    if (eles == null || eles.isEmpty()) {
        // No element found
        session.transfer(flowFile, REL_NOT_FOUND);
    } else {
        // Create a new FlowFile for each matching element.
        for (final Element ele : eles) {
            final String extractedElementValue = extractElementValue(prependValue, outputType, appendValue, ele, attributeKey);

            final FlowFile ff = session.create(flowFile);
            FlowFile updatedFF = ff;

            switch (context.getProperty(DESTINATION).getValue()) {
                case DESTINATION_ATTRIBUTE:
                    updatedFF = session.putAttribute(ff, HTML_ELEMENT_ATTRIBUTE_NAME, extractedElementValue);
                    break;
                case DESTINATION_CONTENT:
                    updatedFF = session.write(ff, new StreamCallback() {
                        @Override
                        public void process(final InputStream inputStream, final OutputStream outputStream) throws IOException {
                            outputStream.write(extractedElementValue.getBytes(StandardCharsets.UTF_8));
                        }
                    });

                    break;
            }

            session.transfer(updatedFF, REL_SUCCESS);
        }

        // Transfer the original HTML
        session.transfer(flowFile, REL_ORIGINAL);
    }
}
 
Example 12
Source File: MergeContent.java    From nifi with Apache License 2.0
@Override
public FlowFile merge(final Bin bin, final ProcessContext context) {
    final ProcessSession session = bin.getSession();
    final List<FlowFile> contents = bin.getContents();

    FlowFile bundle = session.create(contents);

    try {
        bundle = session.write(bundle, new OutputStreamCallback() {
            @Override
            public void process(final OutputStream rawOut) throws IOException {
                try (final OutputStream bufferedOut = new BufferedOutputStream(rawOut)) {
                    // we don't want the packager closing the stream. V1 creates a TAR Output Stream, which then gets
                    // closed, which in turn closes the underlying OutputStream, and we want to protect ourselves against that.
                    final OutputStream out = new NonCloseableOutputStream(bufferedOut);

                    for (final FlowFile flowFile : contents) {
                        bin.getSession().read(flowFile, false, new InputStreamCallback() {
                            @Override
                            public void process(final InputStream rawIn) throws IOException {
                                try (final InputStream in = new BufferedInputStream(rawIn)) {
                                    final Map<String, String> attributes = new HashMap<>(flowFile.getAttributes());

                                    // for backward compatibility purposes, we add the "legacy" NiFi attributes
                                    attributes.put("nf.file.name", attributes.get(CoreAttributes.FILENAME.key()));
                                    attributes.put("nf.file.path", attributes.get(CoreAttributes.PATH.key()));
                                    if (attributes.containsKey(CoreAttributes.MIME_TYPE.key())) {
                                        attributes.put("content-type", attributes.get(CoreAttributes.MIME_TYPE.key()));
                                    }
                                    packager.packageFlowFile(in, out, attributes, flowFile.getSize());
                                }
                            }
                        });
                    }
                }
            }
        });
    } catch (final Exception e) {
        session.remove(bundle);
        throw e;
    }

    bundle = session.putAttribute(bundle, CoreAttributes.FILENAME.key(), createFilename(contents) + ".pkg");
    session.getProvenanceReporter().join(contents, bundle);
    return bundle;
}
 
Example 13
Source File: PutWebSocket.java    From localization_nifi with Apache License 2.0
private FlowFile transferToFailure(final ProcessSession processSession, FlowFile flowfile, final String value) {
    flowfile = processSession.putAttribute(flowfile, ATTR_WS_FAILURE_DETAIL, value);
    processSession.transfer(flowfile, REL_FAILURE);
    return flowfile;
}
 
Example 14
Source File: FetchDistributedMapCache.java    From localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final ComponentLog logger = getLogger();
    final String cacheKey = context.getProperty(PROP_CACHE_ENTRY_IDENTIFIER).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isBlank(cacheKey)) {
        logger.error("FlowFile {} has no attribute for given Cache Entry Identifier", new Object[]{flowFile});
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final DistributedMapCacheClient cache = context.getProperty(PROP_DISTRIBUTED_CACHE_SERVICE).asControllerService(DistributedMapCacheClient.class);

    try {
        final byte[] cacheValue = cache.get(cacheKey, keySerializer, valueDeserializer);

        if(cacheValue==null){
            session.transfer(flowFile, REL_NOT_FOUND);
            logger.info("Could not find an entry in cache for {}; routing to not-found", new Object[]{flowFile});

        } else {
            boolean putInAttribute = context.getProperty(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet();
            if(putInAttribute){
                String attributeName = context.getProperty(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).evaluateAttributeExpressions(flowFile).getValue();
                String attributeValue = new String(cacheValue,context.getProperty(PROP_CHARACTER_SET).getValue());

                int maxLength = context.getProperty(PROP_PUT_ATTRIBUTE_MAX_LENGTH).asInteger();
                if(maxLength < attributeValue.length()){
                    attributeValue = attributeValue.substring(0,maxLength);
                }

                flowFile = session.putAttribute(flowFile, attributeName, attributeValue);

            } else {
                flowFile = session.write(flowFile, new OutputStreamCallback() {
                    @Override
                    public void process(OutputStream out) throws IOException {
                        out.write(cacheValue);
                    }
                });
            }

            session.transfer(flowFile, REL_SUCCESS);
            if(putInAttribute){
                logger.info("Found a cache key of {} and added an attribute to {} with it's value.", new Object[]{cacheKey, flowFile});
            }else {
                logger.info("Found a cache key of {} and replaced the contents of {} with it's value.", new Object[]{cacheKey, flowFile});
            }
        }

    } catch (final IOException e) {
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        logger.error("Unable to communicate with cache when processing {} due to {}", new Object[]{flowFile, e});
    }
}
 
Example 15
Source File: JoltTransformJSON.java    From localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, ProcessSession session) throws ProcessException {

    final FlowFile original = session.get();
    if (original == null) {
        return;
    }

    final ComponentLog logger = getLogger();
    final StopWatch stopWatch = new StopWatch(true);

    final byte[] originalContent = new byte[(int) original.getSize()];
    session.read(original, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, originalContent, true);
        }
    });

    final String jsonString;
    final ClassLoader originalContextClassLoader = Thread.currentThread().getContextClassLoader();

    try {

        final String specString;

        if(context.getProperty(JOLT_SPEC).isSet() && !StringUtils.isEmpty(context.getProperty(JOLT_SPEC).getValue())){
            specString = context.isExpressionLanguagePresent(JOLT_SPEC) ? context.getProperty(JOLT_SPEC).evaluateAttributeExpressions(original).getValue() :
                         context.getProperty(JOLT_SPEC).getValue();
        }else{
            specString = null;
        }

        if(transform == null || (specString != null && !specJsonString.equals(specString)) || (specString == null && SORTR.getValue().equals(context.getProperty(JOLT_TRANSFORM).getValue()))){

            specJsonString = specString;

            final Object specJson;
            if(context.getProperty(JOLT_SPEC).isSet() && !SORTR.getValue().equals(context.getProperty(JOLT_TRANSFORM).getValue())){
                specJson = JsonUtils.jsonToObject(specJsonString, DEFAULT_CHARSET);
            }else{
                specJson = null;
            }

            if(CUSTOMR.getValue().equals(context.getProperty(JOLT_TRANSFORM).getValue())){
                transform = TransformFactory.getCustomTransform(customClassLoader,context.getProperty(CUSTOM_CLASS).getValue(), specJson);
            }else {
                transform = TransformFactory.getTransform(customClassLoader, context.getProperty(JOLT_TRANSFORM).getValue(), specJson);
            }
        }

        if(customClassLoader != null) {
            Thread.currentThread().setContextClassLoader(customClassLoader);
        }

        final ByteArrayInputStream bais = new ByteArrayInputStream(originalContent);
        final Object inputJson = JsonUtils.jsonToObject(bais);
        final Object transformedJson = TransformUtils.transform(transform,inputJson);
        jsonString = JsonUtils.toJsonString(transformedJson);

    } catch (Exception ex) {
        logger.error("Unable to transform {} due to {}", new Object[]{original, ex});
        session.transfer(original, REL_FAILURE);
        return;

    }finally {
        if(customClassLoader != null && originalContextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(originalContextClassLoader);
        }
    }

    FlowFile transformed = session.write(original, new OutputStreamCallback() {
        @Override
        public void process(OutputStream out) throws IOException {
            out.write(jsonString.getBytes(DEFAULT_CHARSET));
        }
    });

    final String transformType = context.getProperty(JOLT_TRANSFORM).getValue();
    transformed = session.putAttribute(transformed, CoreAttributes.MIME_TYPE.key(), "application/json");
    session.transfer(transformed, REL_SUCCESS);
    session.getProvenanceReporter().modifyContent(transformed, "Modified With " + transformType, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
    logger.info("Transformed {}", new Object[]{original});

}
 
Example 16
Source File: MergeContent.java    From localization_nifi with Apache License 2.0
@Override
public FlowFile merge(final Bin bin, final ProcessContext context) {
    final boolean keepPath = context.getProperty(KEEP_PATH).asBoolean();

    final ProcessSession session = bin.getSession();
    final List<FlowFile> contents = bin.getContents();
    unmerged.addAll(contents);

    FlowFile bundle = session.create(contents);

    bundle = session.putAttribute(bundle, CoreAttributes.FILENAME.key(), createFilename(contents) + ".zip");
    bundle = session.write(bundle, new OutputStreamCallback() {
        @Override
        public void process(final OutputStream rawOut) throws IOException {
            try (final OutputStream bufferedOut = new BufferedOutputStream(rawOut);
                    final ZipOutputStream out = new ZipOutputStream(bufferedOut)) {
                out.setLevel(compressionLevel);
                for (final FlowFile flowFile : contents) {
                    final String path = keepPath ? getPath(flowFile) : "";
                    final String entryName = path + flowFile.getAttribute(CoreAttributes.FILENAME.key());
                    final ZipEntry zipEntry = new ZipEntry(entryName);
                    zipEntry.setSize(flowFile.getSize());
                    try {
                        out.putNextEntry(zipEntry);

                        bin.getSession().exportTo(flowFile, out);
                        out.closeEntry();
                        unmerged.remove(flowFile);
                    } catch (ZipException e) {
                        getLogger().error("Encountered exception merging {}", new Object[]{flowFile}, e);
                    }
                }

                out.finish();
                out.flush();
            }
        }
    });

    session.getProvenanceReporter().join(contents, bundle);
    return bundle;
}
 
Example 17
Source File: InvokeGRPC.java    From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile fileToProcess = null;
    if (context.hasIncomingConnection()) {
        fileToProcess = session.get();

        // If we have no FlowFile, and all incoming connections are self-loops then we can continue on.
        // However, if we have no FlowFile and we have connections coming from other Processors, then
        // we know that we should run only if we have a FlowFile.
        if (fileToProcess == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final ComponentLog logger = getLogger();
    final FlowFileServiceGrpc.FlowFileServiceBlockingStub blockingStub = blockingStubReference.get();
    final String host = context.getProperty(PROP_SERVICE_HOST).getValue();
    final String port = context.getProperty(PROP_SERVICE_PORT).getValue();
    fileToProcess = session.putAttribute(fileToProcess, SERVICE_HOST, host);
    fileToProcess = session.putAttribute(fileToProcess, SERVICE_PORT, port);
    FlowFile responseFlowFile = null;
    try {
        final FlowFileRequest.Builder requestBuilder = FlowFileRequest.newBuilder()
                .setId(fileToProcess.getId())
                .putAllAttributes(fileToProcess.getAttributes());

        // if the processor is configured to send the content, turn the content into bytes
        // and add it to the request.
        final boolean sendContent = context.getProperty(PROP_SEND_CONTENT).asBoolean();
        if (sendContent) {
            try (final InputStream contents = session.read(fileToProcess)) {
                requestBuilder.setContent(ByteString.readFrom(contents));
            }
            // emit provenance event
            session.getProvenanceReporter().send(fileToProcess, getRemote(host, port), true);
        }
        final FlowFileRequest flowFileRequest = requestBuilder.build();
        logRequest(logger, host, port, flowFileRequest);

        final FlowFileReply flowFileReply = blockingStub.send(flowFileRequest);
        logReply(logger, host, port, flowFileReply);

        final FlowFileReply.ResponseCode responseCode = flowFileReply.getResponseCode();
        final String body = flowFileReply.getBody();

        fileToProcess = session.putAttribute(fileToProcess, RESPONSE_CODE, String.valueOf(responseCode));
        fileToProcess = session.putAttribute(fileToProcess, RESPONSE_BODY, body);

        responseFlowFile = session.create(fileToProcess);
        route(fileToProcess, responseFlowFile, session, context, responseCode);

    } catch (final Exception e) {
        // penalize or yield
        if (fileToProcess != null) {
            logger.error("Routing to {} due to exception: {}", new Object[]{REL_FAILURE.getName(), e}, e);
            fileToProcess = session.penalize(fileToProcess);
            fileToProcess = session.putAttribute(fileToProcess, EXCEPTION_CLASS, e.getClass().getName());
            fileToProcess = session.putAttribute(fileToProcess, EXCEPTION_MESSAGE, e.getMessage());
            // transfer original to failure
            session.transfer(fileToProcess, REL_FAILURE);
        } else {
            logger.error("Yielding processor due to exception encountered as a source processor: {}", e);
            context.yield();
        }

        // cleanup
        try {
            if (responseFlowFile != null) {
                session.remove(responseFlowFile);
            }
        } catch (final Exception e1) {
            logger.error("Could not cleanup response flowfile due to exception: {}", new Object[]{e1}, e1);
        }
    }
}
 
Example 18
Source File: ModifyHTMLElement.java    From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final Document doc;
    final Elements eles;
    try {
        doc = parseHTMLDocumentFromFlowfile(flowFile, context, session);
        eles = doc.select(context.getProperty(CSS_SELECTOR).evaluateAttributeExpressions(flowFile).getValue());
    } catch (Exception ex) {
        getLogger().error("Failed to extract HTML from {} due to {}; routing to {}", new Object[] {flowFile, ex.toString(), REL_INVALID_HTML.getName()}, ex);
        session.transfer(flowFile, REL_INVALID_HTML);
        return;
    }

    final String modifiedValue = context.getProperty(MODIFIED_VALUE).evaluateAttributeExpressions(flowFile).getValue();

    if (eles == null || eles.size() == 0) {
        // No element found
        session.transfer(flowFile, REL_NOT_FOUND);
    } else {
        for (Element ele : eles) {
            switch (context.getProperty(OUTPUT_TYPE).getValue()) {
                case ELEMENT_HTML:
                    ele.html(modifiedValue);
                    break;
                case ELEMENT_ATTRIBUTE:
                    ele.attr(context.getProperty(ATTRIBUTE_KEY).evaluateAttributeExpressions(flowFile).getValue(), modifiedValue);
                    break;
                case ELEMENT_TEXT:
                    ele.text(modifiedValue);
                    break;
            }
        }

        FlowFile ff = session.write(session.create(flowFile), new StreamCallback() {
            @Override
            public void process(InputStream in, OutputStream out) throws IOException {
                out.write(doc.html().getBytes(StandardCharsets.UTF_8));
            }
        });
        ff = session.putAttribute(ff, NUM_ELEMENTS_MODIFIED_ATTR, String.valueOf(eles.size()));
        session.transfer(ff, REL_SUCCESS);

        // Transfer the original HTML
        session.transfer(flowFile, REL_ORIGINAL);
    }
}
 
Example 19
Source File: PutDistributedMapCache.java    From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final ComponentLog logger = getLogger();

    // cache key is computed from attribute 'CACHE_ENTRY_IDENTIFIER' with expression language support
    final String cacheKey = context.getProperty(CACHE_ENTRY_IDENTIFIER).evaluateAttributeExpressions(flowFile).getValue();

    // if the computed value is null, or empty, we transfer the flow file to failure relationship
    if (StringUtils.isBlank(cacheKey)) {
        logger.error("FlowFile {} has no attribute for given Cache Entry Identifier", new Object[] {flowFile});
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    // the cache client used to interact with the distributed cache
    final DistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(DistributedMapCacheClient.class);

    try {

        final long maxCacheEntrySize = context.getProperty(CACHE_ENTRY_MAX_BYTES).asDataSize(DataUnit.B).longValue();
        long flowFileSize = flowFile.getSize();

        // too big flow file
        if (flowFileSize > maxCacheEntrySize) {
            logger.warn("Flow file {} size {} exceeds the max cache entry size ({} B).", new Object[] {flowFile, flowFileSize, maxCacheEntrySize});
            session.transfer(flowFile, REL_FAILURE);
            return;
        }

        if (flowFileSize == 0) {
            logger.warn("Flow file {} is empty, there is nothing to cache.", new Object[] {flowFile});
            session.transfer(flowFile, REL_FAILURE);
            return;

        }

        // get flow file content
        final ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        session.exportTo(flowFile, byteStream);
        byte[] cacheValue = byteStream.toByteArray();
        final String updateStrategy = context.getProperty(CACHE_UPDATE_STRATEGY).getValue();
        boolean cached = false;

        if (updateStrategy.equals(CACHE_UPDATE_REPLACE.getValue())) {
            cache.put(cacheKey, cacheValue, keySerializer, valueSerializer);
            cached = true;
        } else if (updateStrategy.equals(CACHE_UPDATE_KEEP_ORIGINAL.getValue())) {
            final byte[] oldValue = cache.getAndPutIfAbsent(cacheKey, cacheValue, keySerializer, valueSerializer, valueDeserializer);
            if (oldValue == null) {
                cached = true;
            }
        }

        // set 'cached' attribute
        flowFile = session.putAttribute(flowFile, CACHED_ATTRIBUTE_NAME, String.valueOf(cached));

        if (cached) {
            session.transfer(flowFile, REL_SUCCESS);
        } else {
            session.transfer(flowFile, REL_FAILURE);
        }

    } catch (final IOException e) {
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        logger.error("Unable to communicate with cache when processing {} due to {}", new Object[] {flowFile, e});
    }
}
 
Example 20
Source File: RouteHL7.java    From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).evaluateAttributeExpressions(flowFile).getValue());

    final byte[] buffer = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, buffer);
        }
    });

    @SuppressWarnings("resource")
    final HapiContext hapiContext = new DefaultHapiContext();
    hapiContext.setValidationContext((ca.uhn.hl7v2.validation.ValidationContext) ValidationContextFactory.noValidation());

    final PipeParser parser = hapiContext.getPipeParser();
    final String hl7Text = new String(buffer, charset);
    final HL7Message message;
    try {
        final Message hapiMessage = parser.parse(hl7Text);
        message = new HapiMessage(hapiMessage);
    } catch (final Exception e) {
        getLogger().error("Failed to parse {} as HL7 due to {}; routing to failure", new Object[]{flowFile, e});
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    final Set<String> matchingRels = new HashSet<>();
    final Map<Relationship, HL7Query> queryMap = queries;
    for (final Map.Entry<Relationship, HL7Query> entry : queryMap.entrySet()) {
        final Relationship relationship = entry.getKey();
        final HL7Query query = entry.getValue();

        final QueryResult result = query.evaluate(message);
        if (result.isMatch()) {
            FlowFile clone = session.clone(flowFile);
            clone = session.putAttribute(clone, "RouteHL7.Route", relationship.getName());
            session.transfer(clone, relationship);
            session.getProvenanceReporter().route(clone, relationship);
            matchingRels.add(relationship.getName());
        }
    }

    session.transfer(flowFile, REL_ORIGINAL);
    getLogger().info("Routed a copy of {} to {} relationships: {}", new Object[]{flowFile, matchingRels.size(), matchingRels});
}