Java Code Examples for org.apache.nifi.provenance.serialization.RecordWriter

The following examples show how to use org.apache.nifi.provenance.serialization.RecordWriter. These examples are extracted from open source projects. You can vote up the examples you find useful or vote down those you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
protected RecordWriter[] createWriters(final RepositoryConfiguration config, final long initialRecordId) throws IOException {
    // Spread the journal files round-robin across all configured storage directories.
    final List<File> storageDirectories = new ArrayList<>(config.getStorageDirectories().values());

    final int journalCount = config.getJournalCount();
    final RecordWriter[] writers = new RecordWriter[journalCount];
    for (int journalIndex = 0; journalIndex < journalCount; journalIndex++) {
        final File storageDirectory = storageDirectories.get(journalIndex % storageDirectories.size());
        final File journalDirectory = new File(storageDirectory, "journals");
        // Journal filename encodes the first record id plus the journal index, e.g. "1234.journal.0".
        final File journalFile = new File(journalDirectory, initialRecordId + ".journal." + journalIndex);

        final RecordWriter writer = RecordWriters.newSchemaRecordWriter(journalFile, idGenerator, false, false);
        writer.writeHeader(initialRecordId);
        writers[journalIndex] = writer;
    }

    logger.info("Created new Provenance Event Writers for events starting with ID {}", initialRecordId);
    return writers;
}
 
Example 2
@Override
public synchronized void close() throws IOException {
    // Mark closed first so no new work is accepted while we tear down.
    this.closed.set(true);
    writeLock.lock();
    try {
        logger.debug("Obtained write lock for close");

        scheduledExecService.shutdownNow();
        rolloverExecutor.shutdownNow();
        queryExecService.shutdownNow();

        getIndexManager().close();

        if (writers != null) {
            // Close every writer even if one fails; the original stopped at the first
            // IOException and leaked the remaining open journal writers. Rethrow the
            // first failure afterward, with any later failures attached as suppressed.
            IOException firstFailure = null;
            for (final RecordWriter writer : writers) {
                try {
                    writer.close();
                } catch (final IOException ioe) {
                    if (firstFailure == null) {
                        firstFailure = ioe;
                    } else {
                        firstFailure.addSuppressed(ioe);
                    }
                }
            }
            if (firstFailure != null) {
                throw firstFailure;
            }
        }
    } finally {
        writeLock.unlock();
    }
}
 
Example 3
@Test
public void testSimpleWriteWithToc() throws IOException {
    // Write a single uncompressed event and verify it can be recovered via the table of contents.
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    final RecordWriter writer = createWriter(journalFile, tocWriter, false, 1024 * 1024);

    writer.writeHeader(1L);
    writer.writeRecord(createEvent());
    writer.close();

    // Open the TOC reader inside try-with-resources so it is closed even when an
    // assertion fails (the original never closed it).
    try (final TocReader tocReader = new StandardTocReader(tocFile);
        final FileInputStream fis = new FileInputStream(journalFile);
        final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        assertEquals(0, reader.getBlockIndex());
        reader.skipToBlock(0);
        final StandardProvenanceEventRecord recovered = reader.nextRecord();
        assertNotNull(recovered);

        assertEquals("nifi://unit-test", recovered.getTransitUri());
        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
 
Example 4
@Test
public void testSingleRecordCompressed() throws IOException {
    // Write a single event with compression enabled and verify it round-trips.
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 8192);

    writer.writeHeader(1L);
    writer.writeRecord(createEvent());
    writer.close();

    // Open the TOC reader inside try-with-resources so it is closed even when an
    // assertion fails (the original never closed it).
    try (final TocReader tocReader = new StandardTocReader(tocFile);
        final FileInputStream fis = new FileInputStream(journalFile);
        final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        assertEquals(0, reader.getBlockIndex());
        reader.skipToBlock(0);
        final StandardProvenanceEventRecord recovered = reader.nextRecord();
        assertNotNull(recovered);

        assertEquals("nifi://unit-test", recovered.getTransitUri());
        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
 
Example 5
protected RecordWriter[] createWriters(final RepositoryConfiguration config, final long initialRecordId) throws IOException {
    // One writer per configured journal; files are distributed across storage dirs round-robin.
    final List<File> dirs = new ArrayList<>(config.getStorageDirectories().values());
    final RecordWriter[] writers = new RecordWriter[config.getJournalCount()];

    for (int i = 0; i < writers.length; i++) {
        final File journalDir = new File(dirs.get(i % dirs.size()), "journals");
        final String journalName = initialRecordId + ".journal." + i;

        writers[i] = RecordWriters.newSchemaRecordWriter(new File(journalDir, journalName), idGenerator, false, false);
        writers[i].writeHeader(initialRecordId);
    }

    logger.info("Created new Provenance Event Writers for events starting with ID {}", initialRecordId);
    return writers;
}
 
Example 6
@Override
public synchronized void close() throws IOException {
    // Mark closed first so no new work is accepted while we tear down.
    this.closed.set(true);
    writeLock.lock();
    try {
        logger.debug("Obtained write lock for close");

        scheduledExecService.shutdownNow();
        rolloverExecutor.shutdownNow();
        queryExecService.shutdownNow();

        getIndexManager().close();

        if (writers != null) {
            // Close every writer even if one fails; the original stopped at the first
            // IOException and leaked the remaining open journal writers. Rethrow the
            // first failure afterward, with any later failures attached as suppressed.
            IOException firstFailure = null;
            for (final RecordWriter writer : writers) {
                try {
                    writer.close();
                } catch (final IOException ioe) {
                    if (firstFailure == null) {
                        firstFailure = ioe;
                    } else {
                        firstFailure.addSuppressed(ioe);
                    }
                }
            }
            if (firstFailure != null) {
                throw firstFailure;
            }
        }
    } finally {
        writeLock.unlock();
    }
}
 
Example 7
Source Project: nifi   Source File: PersistentProvenanceRepository.java    License: Apache License 2.0 6 votes vote down vote up
protected RecordWriter[] createWriters(final RepositoryConfiguration config, final long initialRecordId) throws IOException {
    // Snapshot of every configured storage directory; journals are assigned round-robin.
    final List<File> storageDirectories = new ArrayList<>(config.getStorageDirectories().values());
    final int dirCount = storageDirectories.size();

    final RecordWriter[] writers = new RecordWriter[config.getJournalCount()];
    for (int i = 0; i < config.getJournalCount(); i++) {
        final File journalDirectory = new File(storageDirectories.get(i % dirCount), "journals");
        // e.g. "<initialRecordId>.journal.<i>"
        final File journalFile = new File(journalDirectory, initialRecordId + ".journal." + i);

        final RecordWriter journalWriter = RecordWriters.newSchemaRecordWriter(journalFile, idGenerator, false, false);
        journalWriter.writeHeader(initialRecordId);
        writers[i] = journalWriter;
    }

    logger.info("Created new Provenance Event Writers for events starting with ID {}", initialRecordId);
    return writers;
}
 
Example 8
Source Project: nifi   Source File: PersistentProvenanceRepository.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public synchronized void close() throws IOException {
    // Mark closed first so no new work is accepted while we tear down.
    this.closed.set(true);
    writeLock.lock();
    try {
        logger.debug("Obtained write lock for close");

        scheduledExecService.shutdownNow();
        rolloverExecutor.shutdownNow();
        queryExecService.shutdownNow();

        getIndexManager().close();

        if (writers != null) {
            // Close every writer even if one fails; the original stopped at the first
            // IOException and leaked the remaining open journal writers. Rethrow the
            // first failure afterward, with any later failures attached as suppressed.
            IOException firstFailure = null;
            for (final RecordWriter writer : writers) {
                try {
                    writer.close();
                } catch (final IOException ioe) {
                    if (firstFailure == null) {
                        firstFailure = ioe;
                    } else {
                        firstFailure.addSuppressed(ioe);
                    }
                }
            }
            if (firstFailure != null) {
                throw firstFailure;
            }
        }
    } finally {
        writeLock.unlock();
    }
}
 
Example 9
Source Project: nifi   Source File: AbstractTestRecordReaderWriter.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testSimpleWriteWithToc() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);

    // Write a single event (block size is large enough that everything lands in block 0).
    final RecordWriter writer = createWriter(journalFile, tocWriter, false, 1024 * 1024);
    writer.writeHeader(1L);
    writer.writeRecord(createEvent());
    writer.close();

    // Recover the event via the TOC and verify its transit URI and block index.
    assertRecoveredRecord(journalFile, new StandardTocReader(tocFile), "nifi://unit-test", 0);

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
 
Example 10
Source Project: nifi   Source File: AbstractTestRecordReaderWriter.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testSingleRecordCompressed() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);

    // Write a single event with compression enabled (8 KB uncompressed block size).
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 8192);
    writer.writeHeader(1L);
    writer.writeRecord(createEvent());
    writer.close();

    // The single record must be recoverable from block 0 with its transit URI intact.
    final TocReader tocReader = new StandardTocReader(tocFile);
    final String expectedTransitUri = "nifi://unit-test";
    final int expectedBlockIndex = 0;
    assertRecoveredRecord(journalFile, tocReader, expectedTransitUri, expectedBlockIndex);

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
 
Example 11
/**
 * Writes each of the given events to the supplied RecordWriter and returns a map from event to
 * the StorageSummary describing where it was stored, tagged with this partition's name.
 * If any write fails, the writer is marked dirty before the exception propagates so that no
 * other thread appends to a journal that now contains a partial record.
 */
private Map<ProvenanceEventRecord, StorageSummary> addEvents(final Iterable<ProvenanceEventRecord> events, final RecordWriter writer) throws IOException {
    final Map<ProvenanceEventRecord, StorageSummary> locationMap = new HashMap<>();

    try {
        long maxId = -1L;
        int numEvents = 0;
        for (final ProvenanceEventRecord nextEvent : events) {
            final StorageSummary writerSummary = writer.writeRecord(nextEvent);
            // Re-wrap the writer's summary so that it also carries this partition's name.
            final StorageSummary summaryWithIndex = new StorageSummary(writerSummary.getEventId(), writerSummary.getStorageLocation(), this.partitionName,
                writerSummary.getBlockIndex(), writerSummary.getSerializedLength(), writerSummary.getBytesWritten());
            locationMap.put(nextEvent, summaryWithIndex);
            maxId = summaryWithIndex.getEventId();
            numEvents++;
        }

        if (numEvents == 0) {
            return locationMap;
        }

        writer.flush();

        // Update max event id to be equal to the greater of the current value or the
        // max value just written. (Math.max for consistency with the sibling implementation.)
        final long maxIdWritten = maxId;
        this.maxEventId.getAndUpdate(cur -> Math.max(maxIdWritten, cur));

        if (config.isAlwaysSync()) {
            writer.sync();
        }
    } catch (final Exception e) {
        // We need to set the repoDirty flag before we release the lock for this journal.
        // Otherwise, another thread may write to this journal -- this is a problem because
        // the journal contains part of our record but not all of it. Writing to the end of this
        // journal will result in corruption!
        writer.markDirty();
        throw e;
    }

    return locationMap;
}
 
Example 12
@Test
public void testMultipleRecordsSameBlockCompressed() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    // A new compression block starts every 1 MB of uncompressed data, so ten small
    // records all fall into block 0.
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 1024 * 1024);

    writer.writeHeader(1L);
    int written = 0;
    while (written < 10) {
        writer.writeRecord(createEvent());
        written++;
    }
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);

    try (final FileInputStream input = new FileInputStream(journalFile);
        final RecordReader reader = createReader(input, journalFile.getName(), tocReader, 2048)) {
        for (int i = 0; i < 10; i++) {
            assertEquals(0, reader.getBlockIndex());

            // Alternate between explicitly seeking to block 0 and relying on the reader's
            // current position, so both code paths are exercised.
            if (i <= 5) {
                reader.skipToBlock(0);
            }

            final StandardProvenanceEventRecord recovered = reader.nextRecord();
            assertNotNull(recovered);
            assertEquals("nifi://unit-test", recovered.getTransitUri());
        }

        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
 
Example 13
@Test
public void testMultipleRecordsMultipleBlocksCompressed() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    // Use a tiny uncompressed block size (100 bytes) so each record spills into a new block.
    // (The original comment claimed "each 10 bytes", which did not match the value passed.)
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 100);

    writer.writeHeader(1L);
    for (int i = 0; i < 10; i++) {
        writer.writeRecord(createEvent());
    }
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);

    try (final FileInputStream fis = new FileInputStream(journalFile);
        final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        for (int i = 0; i < 10; i++) {
            // Removed leftover System.out.println debug output from the original.
            final StandardProvenanceEventRecord recovered = reader.nextRecord();
            assertNotNull(recovered);
            assertEquals(i, recovered.getEventId());
            assertEquals("nifi://unit-test", recovered.getTransitUri());

            final Map<String, String> updatedAttrs = recovered.getUpdatedAttributes();
            assertNotNull(updatedAttrs);
            assertEquals(2, updatedAttrs.size());
            assertEquals("1.txt", updatedAttrs.get("filename"));
            assertTrue(updatedAttrs.containsKey("uuid"));
        }

        // All ten records consumed; the reader must report end-of-stream.
        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
 
Example 14
@BeforeClass
public static void findJournalSizes() throws IOException {
    // Determine the on-disk size of a journal header and of two serialized records by
    // measuring the file length after each write + flush.

    final Map<String, String> attributes = new HashMap<>();
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    attributes.put("uuid", "12345678-0000-0000-0000-012345678912");
    builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    final ProvenanceEventRecord record = builder.build();
    // Second record differs only in component id, so its serialized size may differ.
    builder.setComponentId("2345");
    final ProvenanceEventRecord record2 = builder.build();

    final File tempRecordFile = tempFolder.newFile("record.tmp");
    System.out.println("findJournalSizes position 0 = " + tempRecordFile.length());

    final AtomicLong idGenerator = new AtomicLong(0L);
    final RecordWriter writer = RecordWriters.newSchemaRecordWriter(tempRecordFile, idGenerator, false, false);
    writer.writeHeader(12345L);
    writer.flush();
    // Plain cast instead of boxing through Long.valueOf(...).intValue(); journal sizes
    // here are far below Integer.MAX_VALUE.
    headerSize = (int) tempRecordFile.length();
    writer.writeRecord(record);
    writer.flush();
    recordSize = (int) tempRecordFile.length() - headerSize;
    writer.writeRecord(record2);
    writer.flush();
    recordSize2 = (int) tempRecordFile.length() - headerSize - recordSize;
    writer.close();

    System.out.println("headerSize =" + headerSize);
    System.out.println("recordSize =" + recordSize);
    System.out.println("recordSize2=" + recordSize2);
}
 
Example 15
/**
 * Test-only accessor that reaches into the repository's private "writers" field via
 * reflection. Returns null if the field cannot be accessed.
 */
RecordWriter[] getWriters() {
    final Class<?> klass = PersistentProvenanceRepository.class; // was a raw Class type
    RecordWriter[] writers = null;
    try {
        final Field writersField = klass.getDeclaredField("writers");
        writersField.setAccessible(true);
        writers = (RecordWriter[]) writersField.get(this);
    } catch (final NoSuchFieldException | IllegalAccessException e) {
        // Best-effort test helper: log the failure and let callers see a null return.
        e.printStackTrace();
    }
    return writers;
}
 
Example 16
@BeforeClass
public static void findJournalSizes() throws IOException {
    // Determine the on-disk size of a journal header and of two serialized records by
    // measuring the file length after each write + flush.

    final Map<String, String> attributes = new HashMap<>();
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    attributes.put("uuid", "12345678-0000-0000-0000-012345678912");
    builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    final ProvenanceEventRecord record = builder.build();
    // Second record differs only in component id, so its serialized size may differ.
    builder.setComponentId("2345");
    final ProvenanceEventRecord record2 = builder.build();

    final File tempRecordFile = tempFolder.newFile("record.tmp");
    System.out.println("findJournalSizes position 0 = " + tempRecordFile.length());

    final AtomicLong idGenerator = new AtomicLong(0L);
    final RecordWriter writer = RecordWriters.newSchemaRecordWriter(tempRecordFile, idGenerator, false, false);
    writer.writeHeader(12345L);
    writer.flush();
    // Plain cast instead of boxing through Long.valueOf(...).intValue(); journal sizes
    // here are far below Integer.MAX_VALUE.
    headerSize = (int) tempRecordFile.length();
    writer.writeRecord(record);
    writer.flush();
    recordSize = (int) tempRecordFile.length() - headerSize;
    writer.writeRecord(record2);
    writer.flush();
    recordSize2 = (int) tempRecordFile.length() - headerSize - recordSize;
    writer.close();

    System.out.println("headerSize =" + headerSize);
    System.out.println("recordSize =" + recordSize);
    System.out.println("recordSize2=" + recordSize2);
}
 
Example 17
/**
 * Test-only accessor that reaches into the repository's private "writers" field via
 * reflection. Returns null if the field cannot be accessed.
 */
RecordWriter[] getWriters() {
    final Class<?> klass = MiNiFiPersistentProvenanceRepository.class; // was a raw Class type
    RecordWriter[] writers = null;
    try {
        final Field writersField = klass.getDeclaredField("writers");
        writersField.setAccessible(true);
        writers = (RecordWriter[]) writersField.get(this);
    } catch (final NoSuchFieldException | IllegalAccessException e) {
        // Best-effort test helper: log the failure and let callers see a null return.
        e.printStackTrace();
    }
    return writers;
}
 
Example 18
Source Project: nifi   Source File: RecordWriterLease.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a lease over the given writer, limited by bytes written, event count, and wall-clock time.
 *
 * @param writer the RecordWriter this lease governs
 * @param maxBytes maximum number of bytes to write under this lease
 * @param maxEvents maximum number of events to write under this lease
 * @param maxMillis maximum number of milliseconds this lease should remain usable
 */
public RecordWriterLease(final RecordWriter writer, final long maxBytes, final int maxEvents, final long maxMillis) {
    this.writer = writer;
    this.maxBytes = maxBytes;
    this.maxEvents = maxEvents;

    // The max timestamp that we want to write to this lease is X number of milliseconds into the future.
    // We don't want X to be more than the given max millis. However, we also don't want to allow it to get too large. If it
    // becomes >= Integer.MAX_VALUE, we could have some timestamp offsets that rollover into the negative range.
    // To avoid that, we could use a value that is no more than Integer.MAX_VALUE. But since the event may be persisted
    // a bit after the lease has been obtained, we subtract 1 hour from that time to give ourselves a little buffer room.
    this.maxSystemTime = System.currentTimeMillis() + Math.min(maxMillis, Integer.MAX_VALUE - TimeUnit.HOURS.toMillis(1));
}
 
Example 19
Source Project: nifi   Source File: WriteAheadStorePartition.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Writes each of the given events to the supplied RecordWriter and returns a map from event to
 * the StorageSummary describing where it was stored, tagged with this partition's name.
 * If any write fails, the writer is marked dirty before the exception propagates so that no
 * other thread appends to a journal that now contains a partial record.
 */
private Map<ProvenanceEventRecord, StorageSummary> addEvents(final Iterable<ProvenanceEventRecord> events, final RecordWriter writer) throws IOException {
    final Map<ProvenanceEventRecord, StorageSummary> locationMap = new HashMap<>();

    try {
        long maxId = -1L;
        int numEvents = 0;
        for (final ProvenanceEventRecord nextEvent : events) {
            final StorageSummary writerSummary = writer.writeRecord(nextEvent);
            // Re-wrap the writer's summary so that it also carries this partition's name.
            final StorageSummary summaryWithIndex = new StorageSummary(writerSummary.getEventId(), writerSummary.getStorageLocation(), this.partitionName,
                writerSummary.getBlockIndex(), writerSummary.getSerializedLength(), writerSummary.getBytesWritten());
            locationMap.put(nextEvent, summaryWithIndex);
            maxId = summaryWithIndex.getEventId();
            numEvents++;
        }

        // Nothing was written; no flush or max-id update needed.
        if (numEvents == 0) {
            return locationMap;
        }

        writer.flush();

        // Update max event id to be equal to the greater of the current value or the
        // max value just written.
        final long maxIdWritten = maxId;
        this.maxEventId.getAndUpdate(cur -> Math.max(maxIdWritten, cur));

        if (config.isAlwaysSync()) {
            writer.sync();
        }
    } catch (final Exception e) {
        // We need to set the repoDirty flag before we release the lock for this journal.
        // Otherwise, another thread may write to this journal -- this is a problem because
        // the journal contains part of our record but not all of it. Writing to the end of this
        // journal will result in corruption!
        writer.markDirty();
        throw e;
    }

    return locationMap;
}
 
Example 20
Source Project: nifi   Source File: ITestPersistentProvenanceRepository.java    License: Apache License 2.0 5 votes vote down vote up
@BeforeClass
public static void findJournalSizes() throws IOException {
    // Determine the on-disk size of a journal header and of two serialized records by
    // measuring the file length after each write + flush.

    final Map<String, String> attributes = new HashMap<>();
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    attributes.put("uuid", "12345678-0000-0000-0000-012345678912");
    builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    final ProvenanceEventRecord record = builder.build();
    // Second record differs only in component id, so its serialized size may differ.
    builder.setComponentId("2345");
    final ProvenanceEventRecord record2 = builder.build();

    final File tempRecordFile = tempFolder.newFile("record.tmp");
    System.out.println("findJournalSizes position 0 = " + tempRecordFile.length());

    final AtomicLong idGenerator = new AtomicLong(0L);
    final RecordWriter writer = RecordWriters.newSchemaRecordWriter(tempRecordFile, idGenerator, false, false);
    writer.writeHeader(12345L);
    writer.flush();
    // Plain cast instead of boxing through Long.valueOf(...).intValue(); journal sizes
    // here are far below Integer.MAX_VALUE.
    headerSize = (int) tempRecordFile.length();
    writer.writeRecord(record);
    writer.flush();
    recordSize = (int) tempRecordFile.length() - headerSize;
    writer.writeRecord(record2);
    writer.flush();
    recordSize2 = (int) tempRecordFile.length() - headerSize - recordSize;
    writer.close();

    System.out.println("headerSize =" + headerSize);
    System.out.println("recordSize =" + recordSize);
    System.out.println("recordSize2=" + recordSize2);
}
 
Example 21
Source Project: nifi   Source File: ITestPersistentProvenanceRepository.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Test-only accessor that reaches into the repository's private "writers" field via
 * reflection. Returns null if the field cannot be accessed.
 */
RecordWriter[] getWriters() {
    final Class<?> klass = PersistentProvenanceRepository.class; // was a raw Class type
    RecordWriter[] writers = null;
    try {
        final Field writersField = klass.getDeclaredField("writers");
        writersField.setAccessible(true);
        writers = (RecordWriter[]) writersField.get(this);
    } catch (final NoSuchFieldException | IllegalAccessException e) {
        // Best-effort test helper: log the failure and let callers see a null return.
        e.printStackTrace();
    }
    return writers;
}
 
Example 22
Source Project: nifi   Source File: AbstractTestRecordReaderWriter.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testMultipleRecordsSameBlockCompressed() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    // A 1 MB uncompressed block size is large enough that all ten records share block 0.
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 1024 * 1024);

    writer.writeHeader(1L);
    for (int recordNum = 0; recordNum < 10; recordNum++) {
        writer.writeRecord(createEvent());
    }
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);

    try (final FileInputStream input = new FileInputStream(journalFile);
        final RecordReader reader = createReader(input, journalFile.getName(), tocReader, 2048)) {
        for (int recordNum = 0; recordNum < 10; recordNum++) {
            assertEquals(0, reader.getBlockIndex());

            // Exercise skipToBlock on the first six reads and skip it on the rest, so that
            // both positioning paths are verified.
            if (recordNum <= 5) {
                reader.skipToBlock(0);
            }

            final StandardProvenanceEventRecord recovered = reader.nextRecord();
            assertNotNull(recovered);
            assertEquals("nifi://unit-test", recovered.getTransitUri());
        }

        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
 
Example 23
Source Project: nifi   Source File: AbstractTestRecordReaderWriter.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testMultipleRecordsMultipleBlocksCompressed() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    // Use a tiny uncompressed block size (100 bytes) so each record spills into a new block.
    // (The original comment claimed "each 10 bytes", which did not match the value passed.)
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 100);

    writer.writeHeader(1L);
    for (int i = 0; i < 10; i++) {
        writer.writeRecord(createEvent());
    }
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);

    try (final FileInputStream fis = new FileInputStream(journalFile);
        final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        for (int i = 0; i < 10; i++) {
            // Removed leftover System.out.println debug output from the original.
            final StandardProvenanceEventRecord recovered = reader.nextRecord();
            assertNotNull(recovered);
            assertEquals(i, recovered.getEventId());
            assertEquals("nifi://unit-test", recovered.getTransitUri());

            final Map<String, String> updatedAttrs = recovered.getUpdatedAttributes();
            assertNotNull(updatedAttrs);
            assertEquals(2, updatedAttrs.size());
            assertEquals("1.txt", updatedAttrs.get("filename"));
            assertTrue(updatedAttrs.containsKey("uuid"));
        }

        // All ten records consumed; the reader must report end-of-stream.
        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
 
Example 24
Source Project: localization_nifi   Source File: RecordWriterLease.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Creates a lease with no limit on the number of events (only a byte-size limit).
 * Delegates to the two-limit constructor with Integer.MAX_VALUE as the max event count.
 */
public RecordWriterLease(final RecordWriter writer, final long maxBytes) {
    this(writer, maxBytes, Integer.MAX_VALUE);
}
 
Example 25
Source Project: localization_nifi   Source File: RecordWriterLease.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Creates a lease over the given writer.
 *
 * @param writer the RecordWriter this lease governs
 * @param maxBytes maximum number of bytes to write under this lease
 * @param maxEvents maximum number of events to write under this lease
 */
public RecordWriterLease(final RecordWriter writer, final long maxBytes, final int maxEvents) {
    this.writer = writer;
    this.maxBytes = maxBytes;
    this.maxEvents = maxEvents;
}
 
Example 26
Source Project: localization_nifi   Source File: RecordWriterLease.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * @return the RecordWriter that this lease governs
 */
public RecordWriter getWriter() {
    return writer;
}
 
Example 27
/**
 * Attempts to roll over to a new event file. Returns true only if the given lease is still
 * the current one and the swap to a new writer succeeds; returns false if another thread has
 * already rolled over, in which case the speculatively created writer and file are cleaned up.
 */
private synchronized boolean tryRollover(final RecordWriterLease lease) throws IOException {
    // Only the holder of the current lease may roll over; a stale lease means another
    // thread already did the work.
    if (!Objects.equals(lease, eventWriterLeaseRef.get())) {
        return false;
    }

    final long nextEventId = idGenerator.get();
    final File updatedEventFile = new File(partitionDirectory, nextEventId + ".prov");
    final RecordWriter updatedWriter = recordWriterFactory.createWriter(updatedEventFile, idGenerator, false, true);
    final RecordWriterLease updatedLease = new RecordWriterLease(updatedWriter, config.getMaxEventFileCapacity(), config.getMaxEventFileCount());
    // CAS guards against a concurrent rollover between the equality check above and here.
    final boolean updated = eventWriterLeaseRef.compareAndSet(lease, updatedLease);

    if (updated) {
        updatedWriter.writeHeader(nextEventId);

        synchronized (minEventIdToPathMap) {
            minEventIdToPathMap.put(nextEventId, updatedEventFile);
        }

        // Queue the previous event file for background compression, retrying once per
        // second until the queue accepts it or the repository is closed.
        if (config.isCompressOnRollover() && lease != null && lease.getWriter() != null) {
            boolean offered = false;
            while (!offered && !closed) {
                try {
                    offered = filesToCompress.offer(lease.getWriter().getFile(), 1, TimeUnit.SECONDS);
                } catch (final InterruptedException ie) {
                    // Restore the interrupt flag before converting to an IOException.
                    Thread.currentThread().interrupt();
                    throw new IOException("Interrupted while waiting to enqueue " + lease.getWriter().getFile() + " for compression");
                }
            }
        }

        return true;
    } else {
        // Lost the race: discard the writer and file that were speculatively created.
        try {
            updatedWriter.close();
        } catch (final Exception e) {
            logger.warn("Failed to close Record Writer {}; some resources may not be cleaned up properly.", updatedWriter, e);
        }

        updatedEventFile.delete();
        return false;
    }
}
 
Example 28
@Test
public void testContentClaimUnchanged() throws IOException {
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 8192);

    final Map<String, String> attributes = new HashMap<>();
    attributes.put("filename", "1.txt");
    attributes.put("uuid", UUID.randomUUID().toString());

    // Build an event whose previous and current content claims are identical.
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    builder.fromFlowFile(TestUtil.createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    builder.setPreviousContentClaim("container-1", "section-1", "identifier-1", 1L, 1L);
    builder.setCurrentContentClaim("container-1", "section-1", "identifier-1", 1L, 1L);
    final ProvenanceEventRecord record = builder.build();

    writer.writeHeader(1L);
    writer.writeRecord(record);
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);

    try (final FileInputStream fis = new FileInputStream(journalFile);
        final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        assertEquals(0, reader.getBlockIndex());
        reader.skipToBlock(0);
        final StandardProvenanceEventRecord recovered = reader.nextRecord();
        assertNotNull(recovered);

        assertEquals("nifi://unit-test", recovered.getTransitUri());

        assertEquals("container-1", recovered.getPreviousContentClaimContainer());
        assertEquals("container-1", recovered.getContentClaimContainer());

        assertEquals("section-1", recovered.getPreviousContentClaimSection());
        assertEquals("section-1", recovered.getContentClaimSection());

        assertEquals("identifier-1", recovered.getPreviousContentClaimIdentifier());
        assertEquals("identifier-1", recovered.getContentClaimIdentifier());

        assertEquals(1L, recovered.getPreviousContentClaimOffset().longValue());
        assertEquals(1L, recovered.getContentClaimOffset().longValue());

        assertEquals(1L, recovered.getPreviousFileSize().longValue());
        // Fixed copy-paste bug: the original re-asserted getContentClaimOffset() here
        // instead of checking the current file size.
        assertEquals(1L, recovered.getFileSize());

        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}
 
Example 29
@Test
public void testContentClaimRemoved() throws IOException {
    // Journal and table-of-contents files live in a unique per-run directory.
    final File journal = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File toc = TocUtil.getTocFile(journal);
    final TocWriter tocWriter = new StandardTocWriter(toc, false, false);
    final RecordWriter recordWriter = createWriter(journal, tocWriter, true, 8192);

    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put("uuid", UUID.randomUUID().toString());
    flowFileAttributes.put("filename", "1.txt");

    // Build a RECEIVE event whose previous content claim is populated but whose
    // current claim is explicitly cleared -- i.e. the content claim was removed.
    final ProvenanceEventBuilder eventBuilder = new StandardProvenanceEventRecord.Builder();
    eventBuilder.setEventTime(System.currentTimeMillis());
    eventBuilder.setEventType(ProvenanceEventType.RECEIVE);
    eventBuilder.setTransitUri("nifi://unit-test");
    eventBuilder.fromFlowFile(TestUtil.createFlowFile(3L, 3000L, flowFileAttributes));
    eventBuilder.setComponentId("1234");
    eventBuilder.setComponentType("dummy processor");
    eventBuilder.setPreviousContentClaim("container-1", "section-1", "identifier-1", 1L, 1L);
    eventBuilder.setCurrentContentClaim(null, null, null, 0L, 0L);
    final ProvenanceEventRecord event = eventBuilder.build();

    // Persist the single event; closing flushes the journal and TOC to disk.
    recordWriter.writeHeader(1L);
    recordWriter.writeRecord(event);
    recordWriter.close();

    final TocReader tocReader = new StandardTocReader(toc);

    try (final FileInputStream in = new FileInputStream(journal);
        final RecordReader recordReader = createReader(in, journal.getName(), tocReader, 2048)) {
        assertEquals(0, recordReader.getBlockIndex());
        recordReader.skipToBlock(0);

        final StandardProvenanceEventRecord restored = recordReader.nextRecord();
        assertNotNull(restored);

        assertEquals("nifi://unit-test", restored.getTransitUri());

        // Previous-claim fields must survive the round trip; current-claim
        // fields must come back null because the claim was removed.
        assertEquals("container-1", restored.getPreviousContentClaimContainer());
        assertNull(restored.getContentClaimContainer());

        assertEquals("section-1", restored.getPreviousContentClaimSection());
        assertNull(restored.getContentClaimSection());

        assertEquals("identifier-1", restored.getPreviousContentClaimIdentifier());
        assertNull(restored.getContentClaimIdentifier());

        assertEquals(1L, restored.getPreviousContentClaimOffset().longValue());
        assertNull(restored.getContentClaimOffset());

        assertEquals(1L, restored.getPreviousFileSize().longValue());
        assertEquals(0L, restored.getFileSize());

        // Exactly one record was written, so the reader must be exhausted.
        assertNull(recordReader.nextRecord());
    }

    FileUtils.deleteFile(journal.getParentFile(), true);
}
 
Example 30
@Test
public void testContentClaimAdded() throws IOException {
    // Journal and table-of-contents files in a unique per-run directory.
    final File journalFile = new File("target/storage/" + UUID.randomUUID().toString() + "/testSimpleWrite.gz");
    final File tocFile = TocUtil.getTocFile(journalFile);
    final TocWriter tocWriter = new StandardTocWriter(tocFile, false, false);
    final RecordWriter writer = createWriter(journalFile, tocWriter, true, 8192);

    final Map<String, String> attributes = new HashMap<>();
    attributes.put("filename", "1.txt");
    attributes.put("uuid", UUID.randomUUID().toString());

    // Build a RECEIVE event with ONLY a current content claim: no call to
    // setPreviousContentClaim, so all "previous" claim fields must be null.
    final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
    builder.setEventTime(System.currentTimeMillis());
    builder.setEventType(ProvenanceEventType.RECEIVE);
    builder.setTransitUri("nifi://unit-test");
    builder.fromFlowFile(TestUtil.createFlowFile(3L, 3000L, attributes));
    builder.setComponentId("1234");
    builder.setComponentType("dummy processor");
    builder.setCurrentContentClaim("container-1", "section-1", "identifier-1", 1L, 1L);
    final ProvenanceEventRecord record = builder.build();

    // Persist the single event; closing flushes the journal and TOC to disk.
    writer.writeHeader(1L);
    writer.writeRecord(record);
    writer.close();

    final TocReader tocReader = new StandardTocReader(tocFile);

    try (final FileInputStream fis = new FileInputStream(journalFile);
        final RecordReader reader = createReader(fis, journalFile.getName(), tocReader, 2048)) {
        assertEquals(0, reader.getBlockIndex());
        reader.skipToBlock(0);
        final StandardProvenanceEventRecord recovered = reader.nextRecord();
        assertNotNull(recovered);

        assertEquals("nifi://unit-test", recovered.getTransitUri());

        // Current-claim fields survive the round trip; previous-claim fields
        // are null because no previous claim was set.
        assertEquals("container-1", recovered.getContentClaimContainer());
        assertNull(recovered.getPreviousContentClaimContainer());

        assertEquals("section-1", recovered.getContentClaimSection());
        assertNull(recovered.getPreviousContentClaimSection());

        assertEquals("identifier-1", recovered.getContentClaimIdentifier());
        assertNull(recovered.getPreviousContentClaimIdentifier());

        assertEquals(1L, recovered.getContentClaimOffset().longValue());
        assertNull(recovered.getPreviousContentClaimOffset());

        // FIX: the original asserted getPreviousContentClaimOffset() twice
        // (duplicate of the line above). Mirroring testContentClaimRemoved,
        // the file-size pair is checked here: the current file size comes from
        // the claim (1L), and the previous file size must be null since
        // setPreviousContentClaim was never called.
        assertEquals(1L, recovered.getFileSize());
        assertNull(recovered.getPreviousFileSize());

        // Exactly one record was written, so the reader must be exhausted.
        assertNull(reader.nextRecord());
    }

    FileUtils.deleteFile(journalFile.getParentFile(), true);
}