com.google.common.io.CountingInputStream Java Examples

The following examples show how to use com.google.common.io.CountingInputStream, Guava's InputStream wrapper that keeps a running count of the bytes read from the underlying stream. Each example is taken from an open-source project; the source file and license are noted above each snippet.
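
Before the project examples, here is a minimal, self-contained sketch of the basic pattern (not taken from any of the projects below; the file name is hypothetical): wrap any InputStream, read it as usual, and call getCount() for the number of bytes consumed so far.

import com.google.common.io.CountingInputStream;

import java.io.FileInputStream;
import java.io.IOException;

public class CountingInputStreamDemo {
    public static void main(String[] args) throws IOException {
        // Wrap any InputStream; getCount() returns the number of bytes read so far.
        try (CountingInputStream in = new CountingInputStream(new FileInputStream("data.bin"))) {
            byte[] buffer = new byte[8192];
            while (in.read(buffer) != -1) {
                // Consume the stream; the counter advances automatically.
            }
            System.out.println("Bytes read: " + in.getCount());
        }
    }
}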
Example #1
Source File: ArithmeticCoderTest.java    From indexr with Apache License 2.0
@Test
public void encodeDecodeTest() throws IOException {
    ArthmeticCoder.SimpleFrequency freq = new ArthmeticCoder.SimpleFrequency(counts);

    ByteArrayOutputStream encodedPool = new ByteArrayOutputStream();
    CountingOutputStream outputCounting = new CountingOutputStream(encodedPool);
    ArthmeticCoder.Encoder encoder = new ArthmeticCoder.Encoder(freq, new BitWrappedOutputStream(outputCounting));
    for (int s : symbols) {
        encoder.write(s);
    }
    encoder.seal();

    ByteArrayInputStream decodedPool = new ByteArrayInputStream(encodedPool.toByteArray());
    CountingInputStream inputCounting = new CountingInputStream(decodedPool);
    ArthmeticCoder.Decoder decoder = new ArthmeticCoder.Decoder(freq, new BitWrappedInputStream(inputCounting));
    int[] symbols2 = new int[symbols.length];
    for (int i = 0; i < symbols.length; i++) {
        symbols2[i] = decoder.read();
    }

    Assert.assertEquals(outputCounting.getCount(), inputCounting.getCount());
    Assert.assertArrayEquals(symbols, symbols2);
}
 
Example #2
Source File: PrometheusRecordCursor.java    From presto with Apache License 2.0
public PrometheusRecordCursor(List<PrometheusColumnHandle> columnHandles, ByteSource byteSource)
{
    this.columnHandles = columnHandles;

    fieldToColumnIndex = new int[columnHandles.size()];
    for (int i = 0; i < columnHandles.size(); i++) {
        PrometheusColumnHandle columnHandle = columnHandles.get(i);
        fieldToColumnIndex[i] = columnHandle.getOrdinalPosition();
    }

    try (CountingInputStream input = new CountingInputStream(byteSource.openStream())) {
        metricsItr = prometheusResultsInStandardizedForm(new PrometheusQueryResponseParse(input).getResults()).iterator();
        totalBytes = input.getCount();
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
 
Example #3
Source File: HprofValidator.java    From hprof-tools with MIT License
public static void main(String[] args) {
    try {
        String fileName = args != null && args.length > 0 ? args[0] : "in.hprof";
        CountingInputStream in = new CountingInputStream(new FileInputStream(fileName));
        ValidatingProcessor processor = new ValidatingProcessor(in);
        HprofReader reader = new HprofReader(in, processor);
        while (reader.hasNext()) {
            reader.next();
        }
        // All data loaded, start to check that it is consistent
        processor.verifyClasses();
        processor.verifyInstances();
    }
    catch (IOException e) {
        System.err.println("Failed to process file");
        e.printStackTrace(System.err);
        throw new RuntimeException(e);
    }
}
 
Example #4
Source File: FileProcessor.java    From brooklin with BSD 2-Clause "Simplified" License
public FileProcessor(DatastreamTask datastreamTask, DatastreamEventProducer producer) throws FileNotFoundException {
  _task = datastreamTask;
  _fileName = datastreamTask.getDatastreamSource().getConnectionString();
  _positionKey = new FilePositionKey(_task.getTaskPrefix(), _task.getDatastreamTaskName(), Instant.now(), _fileName);
  _positionValue = new FilePositionValue();

  // Set up input streams/readers
  final File file = new File(_fileName);
  _positionValue.setFileLengthBytes(file.length());
  _inputStream = new CountingInputStream(new FileInputStream(file));
  _fileReader = new BufferedReader(new InputStreamReader(_inputStream, StandardCharsets.UTF_8));
  _lineNo = new AtomicInteger();

  _producer = producer;
  _isStopped = false;
  _cancelRequested = false;
  LOG.info("Created FileProcessor for " + datastreamTask);
}
 
Example #5
Source File: OSMMapBuilder.java    From OpenMapKitAndroid with BSD 3-Clause "New" or "Revised" License
@Override
protected JTSModel doInBackground(File... params) {
    File f = params[0];
    fileName = f.getName();
    String absPath = f.getAbsolutePath();
    
    Log.i("BEGIN_PARSING", fileName);
    setFileSize(f.length());
    try {
        InputStream is = new FileInputStream(f);
        countingInputStream = new CountingInputStream(is);
        OSMDataSet ds = OSMXmlParserInOSMMapBuilder.parseFromInputStream(countingInputStream, this);
        if (isOSMEdit) {
            jtsModel.mergeEditedOSMDataSet(absPath, ds);
        } else {
            jtsModel.addOSMDataSet(absPath, ds);
        }
        loadedOSMFiles.add(absPath);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return jtsModel;
}
 
Example #6
Source File: DataBlockHeaderImpl.java    From pulsar with Apache License 2.0
public static DataBlockHeader fromStream(InputStream stream) throws IOException {
    CountingInputStream countingStream = new CountingInputStream(stream);
    DataInputStream dis = new DataInputStream(countingStream);
    int magic = dis.readInt();
    if (magic != MAGIC_WORD) {
        throw new IOException("Data block header magic word not match. read: " + magic + " expected: " + MAGIC_WORD);
    }

    long headerLen = dis.readLong();
    long blockLen = dis.readLong();
    long firstEntryId = dis.readLong();
    long toSkip = headerLen - countingStream.getCount();
    if (dis.skip(toSkip) != toSkip) {
        throw new EOFException("Header was too small");
    }

    return new DataBlockHeaderImpl(headerLen, blockLen, firstEntryId);
}
 
Example #7
Source File: EncryptionServicePgpImpl.java    From pgptool with GNU General Public License v3.0
/**
 * @param countingStream this stream is passed for progress reporting only.
 *                       Optional; if not provided, the byte count returned by
 *                       pIn.read() is used instead.
 */
private static void pipeStream(InputStream pIn, OutputStream pOut, int bufSize, Updater progress,
		CountingInputStream countingStream) throws IOException, UserRequestedCancellationException {
	byte[] buf = new byte[bufSize];
	long totalRead = 0;

	int len;
	while ((len = pIn.read(buf)) > 0) {
		pOut.write(buf, 0, len);

		if (countingStream == null) {
			totalRead += len;
			updateProgress(progress, totalRead);
		} else {
			updateProgress(progress, countingStream.getCount());
		}
	}
}
 
Example #8
Source File: CStoreSender.java    From healthcare-dicom-dicomweb-adapter with Apache License 2.0
@Override
public void send(PubsubMessage message) throws Exception {
  String wadoUri = message.getData().toStringUtf8();
  String qidoUri = qidoFromWadoUri(wadoUri);

  // Invoke QIDO-RS to get DICOM tags needed to invoke C-Store.
  JSONArray qidoResponse = dicomWebClient.qidoRs(qidoUri);
  if (qidoResponse.length() != 1) {
    throw new IllegalArgumentException(
        "Invalid QidoRS JSON array length for response: " + qidoResponse.toString());
  }
  String sopClassUid = AttributesUtil.getTagValue(qidoResponse.getJSONObject(0),
      SOP_CLASS_UID_TAG);
  String sopInstanceUid = AttributesUtil.getTagValue(qidoResponse.getJSONObject(0),
      SOP_INSTANCE_UID_TAG);

  // Invoke WADO-RS to get bulk DICOM.
  InputStream responseStream = dicomWebClient.wadoRs(wadoUri);

  CountingInputStream countingStream = new CountingInputStream(responseStream);
  DicomClient.connectAndCstore(sopClassUid, sopInstanceUid, countingStream,
      applicationEntity, dimsePeerAET, dimsePeerIP, dimsePeerPort);
  MonitoringService.addEvent(Event.BYTES, countingStream.getCount());
}
 
Example #9
Source File: CStoreSender.java    From healthcare-dicom-dicomweb-adapter with Apache License 2.0
@Override
public long cstore(AetDictionary.Aet target,
    String studyUid,
    String seriesUid,
    String sopInstanceUid,
    String sopClassUid)
    throws IDicomWebClient.DicomWebException, IOException, InterruptedException {
  String wadoUri =
      String.format("studies/%s/series/%s/instances/%s", studyUid, seriesUid, sopInstanceUid);
  log.info("CStore wadoUri : " + wadoUri);

  InputStream responseStream = dicomWebClient.wadoRs(wadoUri);

  CountingInputStream countingStream = new CountingInputStream(responseStream);
  DicomClient.connectAndCstore(sopClassUid, sopInstanceUid, countingStream,
      applicationEntity, target.getName(), target.getHost(), target.getPort());
  return countingStream.getCount();
}
 
Example #10
Source File: WebpAutoFix.java    From size-analyzer with Apache License 2.0
/**
 * Writes the provided bytes into a new file path, based on the initial image's filepath, but with
 * a .webp extension.
 */
public void apply() {
  Path newFilePath =
      filePath.resolveSibling(MoreFiles.getNameWithoutExtension(filePath) + ".webp");
  try (InputStream inputStream = new FileInputStream(new File(filePath.toString()))) {
    CountingInputStream countingStream = new CountingInputStream(inputStream);
    BufferedImage bufferedImage = WebpSuggester.safelyParseImage(countingStream);

    long oldSize = countingStream.getCount();
    byte[] webpBytes = webpConverter.encodeLosslessWebp(bufferedImage);
    Files.write(newFilePath, webpBytes);
    Files.delete(filePath);
  } catch (IOException | ImageReadException e) {
    throw new RuntimeException(e);
  }
}
 
Example #11
Source File: ExampleRecordCursor.java    From presto with Apache License 2.0
public ExampleRecordCursor(List<ExampleColumnHandle> columnHandles, ByteSource byteSource)
{
    this.columnHandles = columnHandles;

    fieldToColumnIndex = new int[columnHandles.size()];
    for (int i = 0; i < columnHandles.size(); i++) {
        ExampleColumnHandle columnHandle = columnHandles.get(i);
        fieldToColumnIndex[i] = columnHandle.getOrdinalPosition();
    }

    try (CountingInputStream input = new CountingInputStream(byteSource.openStream())) {
        lines = byteSource.asCharSource(UTF_8).readLines().iterator();
        totalBytes = input.getCount();
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
 
Example #12
Source File: WARCReaderFactory.java    From webarchive-commons with Apache License 2.0
/**
 * Constructor.
 * 
 * @param f Uncompressed file to read.
 * @param is InputStream.
 */
public UncompressedWARCReader(final String f, final InputStream is) {
    // Arc file has been tested for existence by the time it gets here.
    setIn(new CountingInputStream(is));
    initialize(f);
}
 
Example #13
Source File: WARCReaderFactory.java    From webarchive-commons with Apache License 2.0
/**
 * Constructor.
 * 
 * @param f Uncompressed file to read.
 * @param offset Offset at which to position Reader.
 * @throws IOException
 */
public UncompressedWARCReader(final File f, final long offset)
throws IOException {
    // File has been tested for existence by the time it gets here.
    setIn(new CountingInputStream(getInputStream(f, offset)));
    getIn().skip(offset);
    initialize(f.getAbsolutePath());
}
 
Example #14
Source File: ARCReaderFactory.java    From webarchive-commons with Apache License 2.0
/**
 * Constructor.
 * 
 * @param f Uncompressed arc to read.
 * @param is InputStream.
 */
public UncompressedARCReader(final String f, final InputStream is, boolean atFirstRecord) {
    // Arc file has been tested for existence by the time it gets here.
    setIn(new CountingInputStream(is));
    setAlignedOnFirstRecord(atFirstRecord);
    initialize(f);
}
 
Example #15
Source File: ARCReaderFactory.java    From webarchive-commons with Apache License 2.0
/**
 * Constructor.
 * 
 * @param f Uncompressed arcfile to read.
 * @param offset Offset at which to position ARCReader.
 * @throws IOException
 */
public UncompressedARCReader(final File f, final long offset)
throws IOException {
    // Arc file has been tested for existence by the time it gets here.
    setIn(new CountingInputStream(getInputStream(f, offset)));
    getIn().skip(offset); 
    initialize(f.getAbsolutePath());
}
 
Example #16
Source File: Tar.java    From phoenicis with GNU Lesser General Public License v3.0
List<File> uncompressTarFile(File inputFile, File outputDir, Consumer<ProgressEntity> stateCallback) {
    try (CountingInputStream countingInputStream = new CountingInputStream(new FileInputStream(inputFile))) {
        final long finalSize = FileUtils.sizeOf(inputFile);
        return uncompress(countingInputStream, countingInputStream, outputDir, finalSize, stateCallback);
    } catch (IOException e) {
        throw new ArchiveException(TAR_ERROR_MESSAGE, e);
    }
}
 
Example #17
Source File: GZIPMembersInputStream.java    From webarchive-commons with Apache License 2.0
/**
 * Seek forward to a particular offset in the compressed stream. Note
 * that after any seek/skip the memberNumbers may not reflect a member's
 * true ordinal position from the beginning of the stream. 
 * 
 * @param position target position
 * @throws IOException
 */
public void compressedSeek(long position) throws IOException {
    in.reset(); 
    long count = ((CountingInputStream)in).getCount();
    long delta = position - count;
    if(delta<0) {
        throw new IllegalArgumentException("can't seek backwards: seeked "+position+" already at "+count); 
    }
    compressedSkip(delta);
}
 
Example #18
Source File: HTTPRequestResource.java    From webarchive-commons with Apache License 2.0
public HTTPRequestResource(MetaData metaData, 
		ResourceContainer container, HttpRequest request,
		boolean forceCheck) {
	super(metaData,container);
	this.request = request;

	MetaData message = metaData.createChild(HTTP_REQUEST_MESSAGE);

	message.putString(HTTP_MESSAGE_METHOD,request.getMessage().getMethodString());
	message.putString(HTTP_MESSAGE_PATH,request.getMessage().getPath());
	message.putString(HTTP_MESSAGE_VERSION,request.getMessage().getVersionString());

	metaData.putLong(HTTP_HEADERS_LENGTH,request.getHeaderBytes());

	if(request.getHeaders().isCorrupt()) {
		metaData.putBoolean(HTTP_HEADERS_CORRUPT,true);
	}

	MetaData headers = metaData.createChild(HTTP_HEADERS_LIST);
	for(HttpHeader h : request.getHeaders()) {
		headers.putString(h.getName(),h.getValue());
		// TODO: handle non-empty request entity (put/post)
	}

	countingIS = new CountingInputStream(request);
	try {
		digIS = 
			new DigestInputStream(countingIS,
					MessageDigest.getInstance("sha1"));
	} catch (NoSuchAlgorithmException e) {
		e.printStackTrace();
	}
}
 
Example #19
Source File: WARCResource.java    From webarchive-commons with Apache License 2.0
public WARCResource(MetaData metaData, ResourceContainer container,
		HttpResponse response) throws ResourceParseException {

	super(metaData.createChild(PAYLOAD_METADATA),container);
	envelope = metaData;
	this.response = response;

	long length = -1;
	metaData.putString(ENVELOPE_FORMAT, ENVELOPE_FORMAT_WARC_1_0);
	metaData.putLong(WARC_HEADER_LENGTH, response.getHeaderBytes());
	MetaData fields = metaData.createChild(WARC_HEADER_METADATA);
	for(HttpHeader h : response.getHeaders()) {
		String name = h.getName();
		String value = h.getValue();
		fields.putString(name,value);
		if(name.toLowerCase().equals("content-length")) {
			// TODO: catch formatexception
			length = Long.parseLong(value);
		}
	}

	if(length >= 0) {
		countingIS = new CountingInputStream(
				ByteStreams.limit(response, length));
	} else {
		throw new ResourceParseException(null);
	}
	try {
		digIS = new DigestInputStream(countingIS, 
				MessageDigest.getInstance("sha1"));
	} catch (NoSuchAlgorithmException e) {
		e.printStackTrace();
	}
}
 
Example #20
Source File: ARCResource.java    From webarchive-commons with Apache License 2.0
public ARCResource(MetaData metaData, ResourceContainer container, 
		ARCMetaData arcMetaData, InputStream raw) {

	super(metaData.createChild(PAYLOAD_METADATA),container);
	envelope = metaData;
	this.arcMetaData = arcMetaData;
	this.raw = raw;

	metaData.putString(ENVELOPE_FORMAT, ENVELOPE_FORMAT_ARC);
	metaData.putLong(ARC_HEADER_LENGTH, arcMetaData.getHeaderLength());
	long leadingNL = arcMetaData.getLeadingNL();
	if(leadingNL > 0) {
		metaData.putLong(PAYLOAD_LEADING_SLOP_BYTES, leadingNL);
	}
	MetaData fields = metaData.createChild(ARC_HEADER_METADATA);

	fields.putString(URL_KEY, arcMetaData.getUrl());
	fields.putString(IP_KEY, arcMetaData.getIP());
	fields.putString(DATE_STRING_KEY, arcMetaData.getDateString());
	fields.putString(MIME_KEY, arcMetaData.getMime());
	fields.putLong(DECLARED_LENGTH_KEY, arcMetaData.getLength());

	countingIS = new CountingInputStream(
			ByteStreams.limit(raw, arcMetaData.getLength()));

	try {
		digIS = new DigestInputStream(countingIS, 
				MessageDigest.getInstance("sha1"));
	} catch (NoSuchAlgorithmException e) {
		e.printStackTrace();
	}
}
 
Example #21
Source File: InputStreamSliceInput.java    From hive-dwrf with Apache License 2.0
@SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
public InputStreamSliceInput(InputStream inputStream)
{
    pushbackInputStream = new PushbackInputStream(inputStream);
    countingInputStream = new CountingInputStream(pushbackInputStream);
    dataInputStream = new LittleEndianDataInputStream(countingInputStream);
}
 
Example #22
Source File: SequentialLogWithHeader.java    From c5-replicator with Apache License 2.0
private static HeaderWithSize readHeaderFromPersistence(BytePersistence persistence) throws IOException {

    try (CountingInputStream input = getCountingInputStream(persistence.getReader())) {
      final OLogHeader header = decodeAndCheckCrc(input, HEADER_SCHEMA);
      final long headerSize = input.getCount();

      return new HeaderWithSize(header, headerSize);
    }
}
 
Example #23
Source File: Tar.java    From phoenicis with GNU Lesser General Public License v3.0
List<File> uncompressTarGzFile(File inputFile, File outputDir, Consumer<ProgressEntity> stateCallback) {
    try (CountingInputStream countingInputStream = new CountingInputStream(new FileInputStream(inputFile));
            InputStream inputStream = new GZIPInputStream(countingInputStream)) {
        final long finalSize = FileUtils.sizeOf(inputFile);
        return uncompress(inputStream, countingInputStream, outputDir, finalSize, stateCallback);
    } catch (IOException e) {
        throw new ArchiveException(TAR_ERROR_MESSAGE, e);
    }
}
 
Example #24
Source File: Tar.java    From phoenicis with GNU Lesser General Public License v3.0
List<File> uncompressTarXzFile(File inputFile, File outputDir, Consumer<ProgressEntity> stateCallback) {
    try (CountingInputStream countingInputStream = new CountingInputStream(new FileInputStream(inputFile));
            InputStream inputStream = new XZCompressorInputStream(countingInputStream)) {
        final long finalSize = FileUtils.sizeOf(inputFile);
        return uncompress(inputStream, countingInputStream, outputDir, finalSize, stateCallback);
    } catch (IOException e) {
        throw new ArchiveException(TAR_ERROR_MESSAGE, e);
    }
}
 
Example #25
Source File: Tar.java    From phoenicis with GNU Lesser General Public License v3.0
List<File> uncompressTarBz2File(File inputFile, File outputDir, Consumer<ProgressEntity> stateCallback) {
    try (CountingInputStream countingInputStream = new CountingInputStream(new FileInputStream(inputFile));
            InputStream inputStream = new BZip2CompressorInputStream(countingInputStream)) {
        final long finalSize = FileUtils.sizeOf(inputFile);
        return uncompress(inputStream, countingInputStream, outputDir, finalSize, stateCallback);
    } catch (IOException e) {
        throw new ArchiveException(TAR_ERROR_MESSAGE, e);
    }
}
 
Example #26
Source File: StowRsSender.java    From healthcare-dicom-dicomweb-adapter with Apache License 2.0
@Override
public void send(PubsubMessage message) throws Exception {
  // Invoke WADO-RS to get bulk DICOM.
  String wadoUri = message.getData().toStringUtf8();
  InputStream responseStream = sourceDicomWebClient.wadoRs(wadoUri);

  // Send the STOW-RS request to peer DicomWeb service.
  CountingInputStream countingStream = new CountingInputStream(responseStream);
  sinkDicomWebClient.stowRs(countingStream);
  MonitoringService.addEvent(Event.BYTES, countingStream.getCount());
}
 
Example #27
Source File: InputAssist.java    From elastic-load-balancing-tools with Apache License 2.0
public InputAssist(InputStream inputStream, boolean enforceChecksum) {
    // use MAX_VALUE before we know the actual header size
    headerSize = Integer.MAX_VALUE;

    cin = new CountingInputStream(inputStream);
    in = new CRC32CInputStream(cin, enforceChecksum);
}
 
Example #28
Source File: PythonDslProjectBuildFileParser.java    From buck with Apache License 2.0
private CountingParserInputStream(CountingInputStream inputStream) {
  super(inputStream);
  this.countingInputStream = inputStream;
}
 
Example #29
Source File: PythonDslProjectBuildFileParser.java    From buck with Apache License 2.0
private CountingParserInputStream(InputStream inputStream) {
  this(new CountingInputStream(inputStream));
}
 
Example #30
Source File: GZIPArchiveReader.java    From BUbiNG with Apache License 2.0
public GZIPArchiveReader(final InputStream input) {
	this.input = new CountingInputStream(input);
	this.repositionableInput = input instanceof RepositionableStream ? (RepositionableStream)input : null;
}