org.apache.commons.compress.compressors.CompressorInputStream Java Examples

The following examples show how to use org.apache.commons.compress.compressors.CompressorInputStream, the abstract base class that all decompressing input streams in Apache Commons Compress (gzip, bzip2, xz, lzma, snappy, Z, deflate, lz4, and others) extend. Instances are usually obtained from a CompressorStreamFactory, either by naming the format explicitly or by auto-detecting it from the stream header. Each example is taken from an open-source project, named in the Source File line above its code.
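Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the file names are placeholders). It shows the typical pattern: let CompressorStreamFactory detect the compression format from the stream header, then copy the decompressed bytes out. Detection requires a stream that supports mark/reset, which is why the input is wrapped in a BufferedInputStream.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.CompressorStreamFactory;

public class DecompressSketch {
    public static void main(String[] args) throws IOException, CompressorException {
        // "input.txt.gz" and "output.txt" are placeholder file names
        try (CompressorInputStream in = new CompressorStreamFactory()
                .createCompressorInputStream(new BufferedInputStream(new FileInputStream("input.txt.gz")));
             OutputStream out = new FileOutputStream("output.txt")) {
            byte[] buffer = new byte[8192];
            int n;
            while ((n = in.read(buffer)) != -1) {
                out.write(buffer, 0, n); // write only the bytes actually read
            }
        }
    }
}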
Example #1
Source File: CompressTools.java    From MyBox with Apache License 2.0
public static File decompress(CompressorInputStream compressorInputStream, File targetFile) {
    try {
        if (compressorInputStream == null) {
            return null;
        }
        File file = (targetFile == null) ? FileTools.getTempFile() : targetFile;
        if (file.exists()) {
            file.delete();
        }
        try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(file))) {
            final byte[] buf = new byte[CommonValues.IOBufferLength];
            int len;
            while (-1 != (len = compressorInputStream.read(buf))) {
                out.write(buf, 0, len);
            }
        }
        return file;
    } catch (Exception e) {
//            logger.debug(e.toString());
        return null;
    }
}
 
Example #2
Source File: IOHelper.java    From spring-boot-cookbook with Apache License 2.0
public static void printTarGzFile(File tarFile) throws IOException {
    try (BufferedInputStream bin = new BufferedInputStream(FileUtils.openInputStream(tarFile));
         CompressorInputStream cis = new GzipCompressorInputStream(bin);
         TarArchiveInputStream tais = new TarArchiveInputStream(cis)) {
        TarArchiveEntry entry;
        while ((entry = tais.getNextTarEntry()) != null) {
            if (entry.isDirectory()) {
                LOGGER.warn("dir:{}", entry.getName());
            } else {
                // assumes each entry fits in memory
                int size = (int) entry.getSize();
                byte[] content = new byte[size];
                // a single read() may return fewer bytes than requested
                IOUtils.readFully(tais, content);
                LOGGER.info("fileName:{}", entry.getName());
                ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(content);
                try (LineIterator iterator = IOUtils.lineIterator(byteArrayInputStream, StandardCharsets.UTF_8)) {
                    while (iterator.hasNext()) {
                        LOGGER.info("line:{}", iterator.nextLine());
                    }
                }
            }
        }
        LOGGER.info("===============finish===============");
    }
}
 
Example #3
Source File: CommonsCompressor.java    From AutoLoadCache with Apache License 2.0
@Override
public byte[] decompress(ByteArrayInputStream bais) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // ensure the compressor stream is closed even if read() fails;
    // closing the in-memory byte-array streams themselves is a no-op
    try (CompressorInputStream cis = FACTORY.createCompressorInputStream(name, bais)) {
        byte[] buf = new byte[BUFFER];
        int len;
        while ((len = cis.read(buf, 0, BUFFER)) != -1) {
            baos.write(buf, 0, len);
        }
    }
    return baos.toByteArray();
}
 
Example #4
Source File: Archives.java    From wildfly-maven-plugin with GNU Lesser General Public License v2.1
private static Path getArchive(final Path path) throws IOException {
    final Path result;
    // Get the extension
    final String fileName = path.getFileName().toString();
    final String loweredFileName = fileName.toLowerCase(Locale.ENGLISH);
    if (loweredFileName.endsWith(".gz")) {
        String tempFileName = fileName.substring(0, loweredFileName.indexOf(".gz"));
        final int index = tempFileName.lastIndexOf('.');
        if (index > 0) {
            result = Files.createTempFile(tempFileName.substring(0, index), tempFileName.substring(index));
        } else {
            // no inner extension (e.g. "foo.gz"), so use the whole base name;
            // substring(0, index) would throw here because index is -1
            result = Files.createTempFile(tempFileName, "");
        }
    try (CompressorInputStream in = new CompressorStreamFactory()
            .createCompressorInputStream(new BufferedInputStream(Files.newInputStream(path)))) {
            Files.copy(in, result, StandardCopyOption.REPLACE_EXISTING);
        } catch (CompressorException e) {
            throw new IOException(e);
        }
    } else {
        result = path;
    }
    return result;
}
 
Example #5
Source File: CompressTools.java    From MyBox with Apache License 2.0
public static String detectCompressor(BufferedInputStream fileIn, String name) {
    String compressor = null;
    try {
        compressor = CompressorStreamFactory.detect(fileIn);
    } catch (Exception ex) {
        // the LZ4 block format has no magic header, so detect() fails; probe it explicitly
        if ("lz4".equals(name)) {
            try (CompressorInputStream in = new BlockLZ4CompressorInputStream(fileIn)) {
                compressor = "lz4-block";
            } catch (Exception e) {
                // not block LZ4 either; fall through and return null
            }
        }
    }
    return compressor;
}
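A hypothetical call site (the file name is a placeholder). CompressorStreamFactory.detect() peeks at the stream header and resets, so the stream must support mark/reset, which BufferedInputStream guarantees:

try (BufferedInputStream in = new BufferedInputStream(new FileInputStream("data.lz4"))) {
    String compressor = CompressTools.detectCompressor(in, "lz4");
    // e.g. "lz4-framed", "lz4-block", or null if nothing matched
}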
 
Example #6
Source File: PressUtility.java    From jstarcraft-core with Apache License 2.0
/**
 * Decompresses the input stream to the output stream using the specified compression type
 * 
 * @param type the compression type, as understood by CompressorStreamFactory
 * @param input the compressed input stream
 * @param output the stream that receives the decompressed bytes
 */
public static void decompress(String type, InputStream input, OutputStream output) {
	try (CompressorInputStream compressor = compressorStreamFactory.createCompressorInputStream(type, input)) {
		byte[] buffer = new byte[BUFFER_SIZE];
		int length;
		while ((length = compressor.read(buffer)) != -1) {
			output.write(buffer, 0, length);
		}
	} catch (Exception exception) {
		String message = StringUtility.format("Failed to decompress {}", type);
		throw new IllegalStateException(message, exception);
	}
}
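A hypothetical call site (the stream sources are placeholders), passing the CompressorStreamFactory.GZIP constant as the type name:

try (InputStream input = new FileInputStream("data.txt.gz");
     OutputStream output = new FileOutputStream("data.txt")) {
    PressUtility.decompress(CompressorStreamFactory.GZIP, input, output);
}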
 
Example #7
Source File: FileLineFetcher.java    From hugegraph-loader with Apache License 2.0
private static Reader createCompressReader(InputStream stream,
                                           FileSource source)
                                           throws Exception {
    Compression compression = source.compression();
    String charset = source.charset();
    switch (compression) {
        case NONE:
            return new InputStreamReader(stream, charset);
        case GZIP:
        case BZ2:
        case XZ:
        case LZMA:
        case SNAPPY_RAW:
        case SNAPPY_FRAMED:
        case Z:
        case DEFLATE:
        case LZ4_BLOCK:
        case LZ4_FRAMED:
            CompressorStreamFactory factory = new CompressorStreamFactory();
            CompressorInputStream cis = factory.createCompressorInputStream(
                                        compression.string(), stream);
            return new InputStreamReader(cis, charset);
        default:
            throw new LoadException("Unsupported compression format '%s'",
                                    compression);
    }
}
 
Example #8
Source File: WebDriverHandlerImpl.java    From IridiumApplicationTesting with MIT License
private String extractZipDriver(
	@NotNull final String driver,
	@NotNull final String name,
	@NotNull final List<File> tempFiles) throws IOException, CompressorException {
	checkNotNull(driver);
	checkArgument(StringUtils.isNotBlank(name));

	final InputStream driverURL = getClass().getResourceAsStream(driver);

	/*
		The driver may not be bundled
	 */
	if (driverURL == null) {
		throw new DriverException("The driver " + driver + " resource does not exist.");
	}

	final CompressorInputStream input = new CompressorStreamFactory()
		.createCompressorInputStream(CompressorStreamFactory.GZIP, driverURL);

	final TarArchiveInputStream tarInput = new TarArchiveInputStream(input);

	/*
		Sometimes tar files contain a "." directory, which we want to ignore.
		So loop until we get a file that isn't in a directory.
	 */
	TarArchiveEntry tarArchiveEntry = tarInput.getNextTarEntry();
	while (tarArchiveEntry != null && tarArchiveEntry.getName().contains("/")) {
		tarArchiveEntry = tarInput.getNextTarEntry();
	}

	return copyDriver(tarInput, name, tempFiles);
}
 
Example #9
Source File: DecompressingContentReader.java    From alfresco-simple-content-stores with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public synchronized ReadableByteChannel getReadableChannel() throws ContentIOException
{
    this.ensureDelegate();
    final String mimetype = this.getMimetype();

    LOGGER.debug("Determined mimetype {} as provided via setter / content data - mimetypes to compress are {}", mimetype,
            this.mimetypesToCompress);

    final boolean shouldCompress = this.mimetypesToCompress == null || this.mimetypesToCompress.isEmpty()
            || (mimetype != null && (this.mimetypesToCompress.contains(mimetype) || this.isMimetypeToCompressWildcardMatch(mimetype)));

    ReadableByteChannel channel;
    if (shouldCompress)
    {
        LOGGER.debug("Content will be decompressed from backing store (url={})", this.getContentUrl());

        final String compressorType = this.compressionType != null && !this.compressionType.trim().isEmpty() ? this.compressionType
                : CompressorStreamFactory.GZIP;
        try
        {
            final CompressorInputStream is = COMPRESSOR_STREAM_FACTORY.createCompressorInputStream(compressorType,
                    this.delegate.getContentInputStream());
            channel = Channels.newChannel(is);
        }
        catch (final CompressorException e)
        {
            LOGGER.error("Failed to open decompressing channel", e);
            throw new ContentIOException("Failed to open channel: " + this, e);
        }
    }
    else
    {
        LOGGER.debug("Content will not be decompressed from backing store (url={})", this.getContentUrl());
        channel = super.getReadableChannel();
    }

    return channel;
}
 
Example #10
Source File: CommonsCompress.java    From darks-codec with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void uncompress(InputStream input, OutputStream out)
        throws Exception
{
    CompressorInputStream cin = null;
    try
    {
        cin = factory.createCompressorInputStream(type, input);
        byte[] buf = new byte[1024];
        int len;
        while ((len = cin.read(buf)) != -1)
        {
            out.write(buf, 0, len);
        }
        out.flush();
    }
    catch (CompressorException e)
    {
        throw new Exception(
                "Failed to decompress data with Commons Compress. Cause: "
                        + e.getMessage(), e);
    }
    finally
    {
        IoHelper.closeIO(cin);
    }
}
 
Example #11
Source File: TransformTask.java    From kite with Apache License 2.0
public PipelineResult run() throws IOException {
  boolean isLocal = (isLocal(from.getDataset()) || isLocal(to.getDataset()));
  if (isLocal) {
    // copy to avoid making changes to the caller's configuration
    Configuration conf = new Configuration(getConf());
    conf.set("mapreduce.framework.name", "local");
    setConf(conf);
  }

  if (isHive(from) || isHive(to)) {
    setConf(addHiveDelegationToken(getConf()));

    // add jars needed for metastore interaction to the classpath
    if (!isLocal) {
      Class<?> fb303Class, thriftClass;
      try {
        // attempt to use libfb303 and libthrift 0.9.2, where async processors were added
        fb303Class = Class.forName(
            "com.facebook.fb303.FacebookService.AsyncProcessor");
        thriftClass = Class.forName(
            "org.apache.thrift.TBaseAsyncProcessor");
      } catch (ClassNotFoundException e) {
        try {
          // fallback to 0.9.0 or earlier
          fb303Class = Class.forName(
              "com.facebook.fb303.FacebookBase");
          thriftClass = Class.forName(
              "org.apache.thrift.TBase");
        } catch (ClassNotFoundException real) {
          throw new DatasetOperationException(
              "Cannot find thrift dependencies", real);
        }
      }

      TaskUtil.configure(getConf())
          .addJarForClass(Encoder.class) // commons-codec
          .addJarForClass(Log.class) // commons-logging
          .addJarForClass(CompressorInputStream.class) // commons-compress
          .addJarForClass(ApiAdapter.class) // datanucleus-core
          .addJarForClass(JDOAdapter.class) // datanucleus-api-jdo
          .addJarForClass(SQLQuery.class) // datanucleus-rdbms
          .addJarForClass(JDOHelper.class) // jdo-api
          .addJarForClass(Transaction.class) // jta
          .addJarForClass(fb303Class) // libfb303
          .addJarForClass(thriftClass) // libthrift
          .addJarForClass(HiveMetaStore.class) // hive-metastore
          .addJarForClass(HiveConf.class); // hive-exec
    }
  }

  PType<T> toPType = ptype(to);
  MapFn<T, T> validate = new CheckEntityClass<T>(to.getType());

  Pipeline pipeline = new MRPipeline(getClass(), getConf());

  PCollection<T> collection = pipeline.read(CrunchDatasets.asSource(from))
      .parallelDo(transform, toPType).parallelDo(validate, toPType);

  if (compact) {
    // the transform must be run before partitioning
    collection = CrunchDatasets.partition(collection, to, numWriters, numPartitionWriters);
  }

  pipeline.write(collection, CrunchDatasets.asTarget(to), mode);

  PipelineResult result = pipeline.done();

  StageResult sr = Iterables.getFirst(result.getStageResults(), null);
  if (sr != null && MAP_INPUT_RECORDS != null) {
    this.count = sr.getCounterValue(MAP_INPUT_RECORDS);
  }

  return result;
}
 
Example #12
Source File: TestTarContainerPacker.java    From hadoop-ozone with Apache License 2.0
@Test
public void pack() throws IOException, CompressorException {

  //GIVEN
  OzoneConfiguration conf = new OzoneConfiguration();

  KeyValueContainerData sourceContainerData =
      createContainer(SOURCE_CONTAINER_ROOT);

  KeyValueContainer sourceContainer =
      new KeyValueContainer(sourceContainerData, conf);

  //sample db file in the metadata directory
  writeDbFile(sourceContainerData, TEST_DB_FILE_NAME);

  //sample chunk file in the chunk directory
  writeChunkFile(sourceContainerData, TEST_CHUNK_FILE_NAME);

  //sample container descriptor file
  writeDescriptor(sourceContainer);

  Path targetFile = TEMP_DIR.resolve("container.tar.gz");

  //WHEN: pack it
  try (FileOutputStream output = new FileOutputStream(targetFile.toFile())) {
    packer.pack(sourceContainer, output);
  }

  //THEN: check the result
  try (FileInputStream input = new FileInputStream(targetFile.toFile())) {
    CompressorInputStream uncompressed = new CompressorStreamFactory()
        .createCompressorInputStream(GZIP, input);
    TarArchiveInputStream tarStream = new TarArchiveInputStream(uncompressed);

    TarArchiveEntry entry;
    Map<String, TarArchiveEntry> entries = new HashMap<>();
    while ((entry = tarStream.getNextTarEntry()) != null) {
      entries.put(entry.getName(), entry);
    }

    Assert.assertTrue(
        entries.containsKey("container.yaml"));

  }

  //read the container descriptor only
  try (FileInputStream input = new FileInputStream(targetFile.toFile())) {
    String containerYaml =
        new String(packer.unpackContainerDescriptor(input), UTF_8);
    Assert.assertEquals(TEST_DESCRIPTOR_FILE_CONTENT, containerYaml);
  }

  KeyValueContainerData destinationContainerData =
      createContainer(DEST_CONTAINER_ROOT);

  KeyValueContainer destinationContainer =
      new KeyValueContainer(destinationContainerData, conf);

  String descriptor;

  //unpackContainerData
  try (FileInputStream input = new FileInputStream(targetFile.toFile())) {
    descriptor =
        new String(packer.unpackContainerData(destinationContainer, input), UTF_8);
  }

  assertExampleMetadataDbIsGood(
      destinationContainerData.getDbFile().toPath(),
      TEST_DB_FILE_NAME);
  assertExampleChunkFileIsGood(
      Paths.get(destinationContainerData.getChunksPath()),
      TEST_CHUNK_FILE_NAME);
  Assert.assertFalse(
      "Descriptor file should not have been extracted by the "
          + "unpackContainerData Call",
      destinationContainer.getContainerFile().exists());
  Assert.assertEquals(TEST_DESCRIPTOR_FILE_CONTENT, descriptor);
}
 
Example #13
Source File: RollingAppenderSizeTest.java    From logging-log4j2 with Apache License 2.0
@Test
public void testAppender() throws Exception {
    final Path path = Paths.get(DIR, "rollingtest.log");
    if (Files.exists(path) && createOnDemand) {
        Assert.fail(String.format("Unexpected file: %s (%s bytes)", path, Files.getAttribute(path, "size")));
    }
    for (int i = 0; i < 500; ++i) {
        logger.debug("This is test message number " + i);
    }
    try {
        Thread.sleep(100);
    } catch (final InterruptedException ie) {
        // Ignore the error.
    }

    final File dir = new File(DIR);
    assertTrue("Directory not created", dir.exists() && dir.listFiles().length > 0);
    final File[] files = dir.listFiles();
    assertNotNull(files);
    assertThat(files, hasItemInArray(that(hasName(that(endsWith(fileExtension))))));

    final FileExtension ext = FileExtension.lookup(fileExtension);
    if (ext == null || FileExtension.ZIP == ext || FileExtension.PACK200 == ext) {
        return; // Apache Commons Compress cannot deflate zip? TODO test decompressing these formats
    }
    // Stop the context to make sure all files are compressed and closed. Trying to remedy failures in CI builds.
    if (!loggerContextRule.getLoggerContext().stop(30, TimeUnit.SECONDS)) {
        System.err.println("Could not stop cleanly " + loggerContextRule + " for " + this);
    }
    for (final File file : files) {
        if (file.getName().endsWith(fileExtension)) {
            CompressorInputStream in = null;
            try (FileInputStream fis = new FileInputStream(file)) {
                try {
                    in = new CompressorStreamFactory().createCompressorInputStream(ext.name().toLowerCase(), fis);
                } catch (final CompressorException ce) {
                    ce.printStackTrace();
                    fail("Error creating input stream from " + file.toString() + ": " + ce.getMessage());
                }
                final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                assertNotNull("No input stream for " + file.getName(), in);
                try {
                    IOUtils.copy(in, baos);
                } catch (final Exception ex) {
                    ex.printStackTrace();
                    fail("Unable to decompress " + file.getAbsolutePath());
                }
                final String text = new String(baos.toByteArray(), Charset.defaultCharset());
                final String[] lines = text.split("[\\r\\n]+");
                for (final String line : lines) {
                    assertTrue(line.contains(
                            "DEBUG o.a.l.l.c.a.r.RollingAppenderSizeTest [main] This is test message number"));
                }
            } finally {
                Closer.close(in);
            }
        }
    }
}
 
Example #14
Source File: CommonsStreamFactory.java    From jarchivelib with Apache License 2.0
/**
 * @see CompressorStreamFactory#createCompressorInputStream(InputStream)
 */
static CompressorInputStream createCompressorInputStream(InputStream in) throws CompressorException {
    return compressorStreamFactory.createCompressorInputStream(in);
}
 
Example #15
Source File: CommonsStreamFactory.java    From jarchivelib with Apache License 2.0
/**
 * @see CompressorStreamFactory#createCompressorInputStream(String, java.io.InputStream)
 */
static CompressorInputStream createCompressorInputStream(CompressionType compressionType, InputStream in)
    throws CompressorException {
    return compressorStreamFactory.createCompressorInputStream(compressionType.getName(), in);
}
 
Example #16
Source File: UnZip.java    From DataHubSystem with GNU Affero General Public License v3.0
public static void unCompress (String zip_file, String output_folder)
      throws IOException, CompressorException, ArchiveException
{
   ArchiveInputStream ais = null;
   ArchiveStreamFactory asf = new ArchiveStreamFactory ();

   FileInputStream fis = new FileInputStream (new File (zip_file));

   if (zip_file.toLowerCase ().endsWith (".tar"))
   {
      ais = asf.createArchiveInputStream (
            ArchiveStreamFactory.TAR, fis);
   }
   else if (zip_file.toLowerCase ().endsWith (".zip"))
   {
      ais = asf.createArchiveInputStream (
            ArchiveStreamFactory.ZIP, fis);
   }
   else if (zip_file.toLowerCase ().endsWith (".tgz") ||
         zip_file.toLowerCase ().endsWith (".tar.gz"))
   {
      CompressorInputStream cis = new CompressorStreamFactory ().
            createCompressorInputStream (CompressorStreamFactory.GZIP, fis);
      ais = asf.createArchiveInputStream (new BufferedInputStream (cis));
   }
   else
   {
      try
      {
         fis.close ();
      }
      catch (IOException e)
      {
         LOGGER.warn ("Cannot close FileInputStream:", e);
      }
      throw new IllegalArgumentException (
            "Format not supported: " + zip_file);
   }

   File output_file = new File (output_folder);
   if (!output_file.exists ()) output_file.mkdirs ();

   // extract the archive entries
   ArchiveEntry nextEntry;
   while ((nextEntry = ais.getNextEntry ()) != null)
   {
      File ftemp = new File (output_folder, nextEntry.getName ());
      if (nextEntry.isDirectory ())
      {
         // use mkdirs: entry paths may contain nested directories
         ftemp.mkdirs ();
      }
      else
      {
         FileOutputStream fos = FileUtils.openOutputStream (ftemp);
         IOUtils.copy (ais, fos);
         fos.close ();
      }
   }
   ais.close ();
   fis.close ();
}
 
Example #17
Source File: CompressingContentStoreTest.java    From alfresco-simple-content-stores with Apache License 2.0 4 votes vote down vote up
private static void testCompressableMimetype(final CompressingContentStore compressingContentStore,
        final FileContentStore fileContentStore, final String mimetype, final String compression) throws Exception
{
    ContentStoreContext.executeInNewContext(() -> {
        final ContentWriter writer = compressingContentStore.getWriter(new ContentContext(null, null));
        final String testText = CompressingContentStoreTest.generateCopmressableText();
        writer.setMimetype(mimetype);
        writer.setEncoding(StandardCharsets.UTF_8.name());
        writer.setLocale(Locale.ENGLISH);
        writer.putContent(testText);

        final String contentUrl = writer.getContentUrl();
        Assert.assertNotNull("Content URL was not set after writing content", contentUrl);

        final ContentReader properReader = compressingContentStore.getReader(contentUrl);
        Assert.assertTrue("Reader was not returned for freshly written content", properReader != null);
        Assert.assertTrue("Reader does not refer to existing file for freshly written content", properReader.exists());

        // reader does not know about mimetype (provided via persisted ContentData at server runtime)
        properReader.setMimetype(mimetype);

        final String readText = properReader.getContentString();
        Assert.assertEquals("Read content does not match written test content", testText, readText);

        ContentReader backingReader = fileContentStore.getReader(contentUrl);
        Assert.assertTrue("Backing reader was not returned for freshly written content", backingReader != null);
        Assert.assertTrue("Backing reader does not refer to existing file for freshly written content", backingReader.exists());

        backingReader.setMimetype(mimetype);

        // can't test for size as this would (at server runtime) be handled via persisted ContentData, not actual file size on disk
        final String backingText = backingReader.getContentString();
        Assert.assertNotEquals("Backing reader did not return unreadable (compressed) content", testText, backingText);

        backingReader = fileContentStore.getReader(contentUrl);
        backingReader.setMimetype(mimetype);
        try
        {
            final CompressorInputStream inputStream = COMPRESSOR_STREAM_FACTORY.createCompressorInputStream(compression,
                    backingReader.getContentInputStream());
            final BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8.name()));
            final StringBuilder sb = new StringBuilder();
            final char[] buf = new char[1024];
            int read = 0;
            while (read != -1)
            {
                sb.append(buf, 0, read);
                read = reader.read(buf);
            }

            Assert.assertEquals("Decompressed content does not match test content", testText, sb.toString());
        }
        catch (final Exception ex)
        {
            Assert.fail(ex.getMessage());
        }

        return null;
    });
}
 
Example #18
Source File: ModelExtractor.java    From tensorflow with Apache License 2.0
public byte[] getModel(Resource modelResource) {

	Assert.notNull(modelResource, "A non-null model resource is required!");

	try (InputStream is = modelResource.getInputStream();
			InputStream bi = new BufferedInputStream(is)) {

		String[] archiveCompressor = detectArchiveAndCompressor(modelResource.getFilename());
		String archive = archiveCompressor[0];
		String compressor = archiveCompressor[1];
		String fragment = modelResource.getURI().getFragment();

		if (StringUtils.hasText(compressor)) {
			try (CompressorInputStream cis = new CompressorStreamFactory().createCompressorInputStream(compressor, bi)) {
				if (StringUtils.hasText(archive)) {
					try (ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(archive, cis)) {
						// Compressor with Archive
						return findInArchiveStream(fragment, ais);
					}
				}
				else { // Compressor only
					return StreamUtils.copyToByteArray(cis);
				}
			}
		}
		else if (StringUtils.hasText(archive)) { // Archive only
			try (ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(archive, bi)) {
				return findInArchiveStream(fragment, ais);
			}
		}
		else {
			// No compressor nor Archive
			return StreamUtils.copyToByteArray(bi);
		}
	}
	catch (Exception e) {
		throw new IllegalStateException("Failed to extract a model from: " + modelResource.getDescription(), e);
	}
}
 
Example #19
Source File: CommonsStreamFactory.java    From jarchivelib with Apache License 2.0
/**
 * Uses the {@link CompressorStreamFactory} to create a new {@link CompressorInputStream} for the compression type
 * and wraps the given source {@link File} with it.
 * 
 * @param type the compression type to create the stream for
 * @param source the file to create the {@link CompressorInputStream} for
 * @return a new {@link CompressorInputStream}
 * @throws IOException if an I/O error occurs
 * @throws CompressorException if the compressor name is not known
 */
static CompressorInputStream createCompressorInputStream(CompressionType type, File source) throws IOException,
    CompressorException {
    return createCompressorInputStream(type, new BufferedInputStream(new FileInputStream(source)));
}
 
Example #20
Source File: CommonsStreamFactory.java    From jarchivelib with Apache License 2.0
/**
 * Uses the {@link CompressorStreamFactory} to create a new {@link CompressorInputStream} for the given source
 * {@link File}.
 * 
 * @param source the file to create the {@link CompressorInputStream} for
 * @return a new {@link CompressorInputStream}
 * @throws IOException if an I/O error occurs
 * @throws CompressorException if the compressor name is not known
 */
static CompressorInputStream createCompressorInputStream(File source) throws IOException, CompressorException {
    return createCompressorInputStream(new BufferedInputStream(new FileInputStream(source)));
}