org.apache.commons.compress.compressors.CompressorException Java Examples
The following examples show how to use
org.apache.commons.compress.compressors.CompressorException.
Each example notes its original project, source file, and license.
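CompressorException is the checked exception that CompressorStreamFactory throws when a compressor stream cannot be created, most often because the compression format is unknown or unsupported. Most of the examples below either wrap it in an IOException or fall back to the raw stream. As a warm-up, here is a minimal, self-contained sketch of that pattern; the class name and file path are illustrative, not taken from any of the projects:

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorStreamFactory;

public class CompressorExceptionSketch {

    // Opens a possibly-compressed file, auto-detecting the format.
    public static InputStream open(Path file) throws IOException {
        // Auto-detection needs mark/reset support, hence the BufferedInputStream.
        InputStream in = new BufferedInputStream(Files.newInputStream(file));
        try {
            // Recognizes gzip, bzip2, xz, and the other formats Commons Compress supports.
            return new CompressorStreamFactory().createCompressorInputStream(in);
        } catch (CompressorException e) {
            // Unknown or unsupported format: translate the checked exception.
            throw new IOException("Cannot detect compression format of " + file, e);
        }
    }

    public static void main(String[] args) throws IOException {
        // Hypothetical path, for illustration only.
        try (InputStream in = open(Paths.get("data.txt.gz"))) {
            System.out.println("first byte: " + in.read());
        }
    }
}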
Example #1
Source File: TarContainerPacker.java From hadoop-ozone with Apache License 2.0
@Override
public byte[] unpackContainerDescriptor(InputStream input) throws IOException {
    try (InputStream decompressed = decompress(input);
         ArchiveInputStream archiveInput = untar(decompressed)) {
        ArchiveEntry entry = archiveInput.getNextEntry();
        while (entry != null) {
            String name = entry.getName();
            if (CONTAINER_FILE_NAME.equals(name)) {
                return readEntry(archiveInput, entry.getSize());
            }
            entry = archiveInput.getNextEntry();
        }
    } catch (CompressorException e) {
        throw new IOException(
            "Can't read the container descriptor from the container archive", e);
    }
    throw new IOException(
        "Container descriptor is missing from the container archive.");
}
Example #2
Source File: TarInspector.java From buck with Apache License 2.0
private static ImmutableMap<String, byte[]> readTar(Optional<String> compressorType, Path tar)
        throws IOException, CompressorException {
    HashMap<String, byte[]> result = new HashMap<>();
    try (TarArchiveInputStream archiveStream = getArchiveInputStream(compressorType, tar)) {
        TarArchiveEntry entry;
        while ((entry = archiveStream.getNextTarEntry()) != null) {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            ByteStreams.copy(archiveStream, buffer);
            result.put(entry.getName(), buffer.toByteArray());
        }
    }
    return ImmutableMap.copyOf(result);
}
Example #3
Source File: Archives.java From wildfly-maven-plugin with GNU Lesser General Public License v2.1
private static Path getArchive(final Path path) throws IOException {
    final Path result;
    // Get the extension
    final String fileName = path.getFileName().toString();
    final String loweredFileName = fileName.toLowerCase(Locale.ENGLISH);
    if (loweredFileName.endsWith(".gz")) {
        String tempFileName = fileName.substring(0, loweredFileName.indexOf(".gz"));
        final int index = tempFileName.lastIndexOf('.');
        if (index > 0) {
            result = Files.createTempFile(tempFileName.substring(0, index), tempFileName.substring(index));
        } else {
            // No embedded extension; use the whole base name as the prefix.
            // (substring(0, index) would throw here when index is -1.)
            result = Files.createTempFile(tempFileName, "");
        }
        try (CompressorInputStream in = new CompressorStreamFactory()
                .createCompressorInputStream(new BufferedInputStream(Files.newInputStream(path)))) {
            Files.copy(in, result, StandardCopyOption.REPLACE_EXISTING);
        } catch (CompressorException e) {
            throw new IOException(e);
        }
    } else {
        result = path;
    }
    return result;
}
Example #4
Source File: ProcessingManagerTest.java From DataHubSystem with GNU Affero General Public License v3.0
@Test
public void system_size() throws IOException, CompressorException, ArchiveException {
    ProcessingManager mgr = new ProcessingManager();
    long size = mgr.system_size(sample);
    Assert.assertEquals(size, 494928);

    File folder = sample.getParentFile();
    File extraction_folder = new File(folder, "unzip");
    extraction_folder.mkdirs();
    UnZip.unCompress(sample.getAbsolutePath(), extraction_folder.getAbsolutePath());
    File tocheck = extraction_folder.listFiles()[0];
    size = mgr.system_size(tocheck);
    Assert.assertEquals(size, SIZE, tocheck.getAbsolutePath());
}
Example #5
Source File: WorkspaceTest.java From WebIDE-Backend with BSD 3-Clause "New" or "Revised" License
@Test
public void testPack() throws IOException, CompressorException {
    ws.create("c/a.txt");
    ws.create("a/c.txt");
    ws.create("中文文件名.txt");
    ws.create("a.txt");
    ws.create("b.wma");
    ws.write("a.txt", "111", false, true, true);

    File targetFile = new File("./target/ws.tar.gz");
    try (OutputStream out = new FileOutputStream(targetFile)) {
        ws.pack("/", out);
    }

    assertEntry(targetFile, "a/c.txt");
    assertEntry(targetFile, "中文文件名.txt");
    targetFile.delete();
}
Example #6
Source File: RPackagesBuilder.java From nexus-repository-r with Eclipse Public License 1.0
/**
 * Builds a PACKAGES.gz file from the collected package details and returns it as a byte array.
 * <p>
 * Call this method ONLY after all package information has been appended to the packageInformation map.
 *
 * @return PACKAGES.gz as a byte array.
 */
public byte[] buildPackagesGz() throws IOException {
    try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
        CompressorStreamFactory compressorStreamFactory = new CompressorStreamFactory();
        try (CompressorOutputStream cos = compressorStreamFactory.createCompressorOutputStream(GZIP, os);
             OutputStreamWriter writer = new OutputStreamWriter(cos, UTF_8)) {
            for (Entry<String, Map<String, String>> eachPackage : packageInformation.entrySet()) {
                writePackageInfo(writer, eachPackage.getValue());
            }
        }
        return os.toByteArray();
    } catch (CompressorException e) {
        throw new IOException("Error compressing metadata", e);
    }
}
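Reading such a file back uses the symmetric factory method. A minimal in-memory round trip, a sketch using only Commons Compress itself (class name and metadata text are made up):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorOutputStream;
import org.apache.commons.compress.compressors.CompressorStreamFactory;
import org.apache.commons.compress.utils.IOUtils;

public class PackagesGzRoundTrip {
    public static void main(String[] args) throws IOException, CompressorException {
        CompressorStreamFactory factory = new CompressorStreamFactory();

        // Compress some metadata text to gzip in memory.
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        try (CompressorOutputStream cos =
                 factory.createCompressorOutputStream(CompressorStreamFactory.GZIP, os)) {
            cos.write("Package: example\nVersion: 1.0\n".getBytes(StandardCharsets.UTF_8));
        }

        // Read it back through the matching input stream.
        ByteArrayOutputStream decompressed = new ByteArrayOutputStream();
        try (InputStream cis = factory.createCompressorInputStream(
                CompressorStreamFactory.GZIP,
                new ByteArrayInputStream(os.toByteArray()))) {
            IOUtils.copy(cis, decompressed);
        }
        System.out.println(decompressed.toString("UTF-8"));
    }
}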
Example #7
Source File: SPARQLQueryBuilderGenericTest.java From inception with Apache License 2.0
@SuppressWarnings("resource") private void importData(Repository aRepo, String aUrl) throws IOException { try (InputStream aIS = openAsStream(aUrl)) { InputStream is = new BufferedInputStream(aIS); try { // Stream is expected to be closed by caller of importData is = new CompressorStreamFactory().createCompressorInputStream(is); } catch (CompressorException e) { // Probably not compressed then or unknown format - just try as is. } // Detect the file format RDFFormat format = Rio.getParserFormatForFileName(aUrl).orElse(RDFFormat.RDFXML); try (RepositoryConnection conn = aRepo.getConnection()) { // If the RDF file contains relative URLs, then they probably start with a hash. // To avoid having two hashes here, we drop the hash from the base prefix configured // by the user. String prefix = StringUtils.removeEnd(kb.getBasePrefix(), "#"); conn.add(is, prefix, format); } } }
Example #8
Source File: TarContainerPacker.java From hadoop-ozone with Apache License 2.0
/**
 * Given a container, include all the required container data/metadata
 * in a tar file.
 *
 * @param container Container to archive (data + metadata).
 * @param output Destination tar file/stream.
 */
@Override
public void pack(Container<KeyValueContainerData> container, OutputStream output)
        throws IOException {
    KeyValueContainerData containerData = container.getContainerData();
    try (OutputStream compressed = compress(output);
         ArchiveOutputStream archiveOutput = tar(compressed)) {
        includePath(containerData.getDbFile().toPath(), DB_DIR_NAME, archiveOutput);
        includePath(Paths.get(containerData.getChunksPath()), CHUNKS_DIR_NAME, archiveOutput);
        includeFile(container.getContainerFile(), CONTAINER_FILE_NAME, archiveOutput);
    } catch (CompressorException e) {
        throw new IOException(
            "Can't compress the container: " + containerData.getContainerID(), e);
    }
}
Example #9
Source File: StreamUtils.java From lucene-solr with Apache License 2.0
private OutputStream outputStream(OutputStream os) throws IOException {
    try {
        return csfType == null ? os : new CompressorStreamFactory().createCompressorOutputStream(csfType, os);
    } catch (CompressorException e) {
        throw new IOException(e.getMessage(), e);
    }
}
Example #10
Source File: Untar.java From buck with Apache License 2.0
private TarArchiveInputStream getArchiveInputStream(Path tarFile)
        throws IOException, CompressorException {
    BufferedInputStream inputStream = new BufferedInputStream(Files.newInputStream(tarFile));
    if (compressorType.isPresent()) {
        return new TarArchiveInputStream(
            new CompressorStreamFactory()
                .createCompressorInputStream(compressorType.get(), inputStream));
    } else {
        return new TarArchiveInputStream(inputStream);
    }
}
Example #11
Source File: TarInspector.java From buck with Apache License 2.0
private static TarArchiveInputStream getArchiveInputStream(
        Optional<String> compressorType, Path tar) throws IOException, CompressorException {
    BufferedInputStream inputStream = new BufferedInputStream(Files.newInputStream(tar));
    if (compressorType.isPresent()) {
        return new TarArchiveInputStream(
            new CompressorStreamFactory()
                .createCompressorInputStream(compressorType.get(), inputStream));
    }
    return new TarArchiveInputStream(inputStream);
}
Example #12
Source File: CommonsCompressor.java From jarchivelib with Apache License 2.0
@Override
public InputStream decompressingStream(InputStream compressedStream) throws IOException {
    try {
        return CommonsStreamFactory.createCompressorInputStream(getCompressionType(), compressedStream);
    } catch (CompressorException e) {
        throw new IOException(e);
    }
}
Example #13
Source File: TestExecuteSQL.java From nifi with Apache License 2.0
@Test
public void testCompression() throws SQLException, CompressorException, IOException {
    // remove previous test database, if any
    final File dbLocation = new File(DB_LOCATION);
    dbLocation.delete();

    // load test data to database
    final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
    Statement stmt = con.createStatement();
    try {
        stmt.execute("drop table TEST_NULL_INT");
    } catch (final SQLException sqle) {
    }
    stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))");
    stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (0, NULL, 1)");
    stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (1, 1, 1)");

    runner.setIncomingConnection(false);
    runner.setProperty(ExecuteSQL.COMPRESSION_FORMAT, AvroUtil.CodecType.BZIP2.name());
    runner.setProperty(ExecuteSQL.SQL_SELECT_QUERY, "SELECT * FROM TEST_NULL_INT");
    runner.run();

    runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1);
    MockFlowFile flowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0);

    try (DataFileStream<GenericRecord> dfs = new DataFileStream<>(
            new ByteArrayInputStream(flowFile.toByteArray()), new GenericDatumReader<GenericRecord>())) {
        assertEquals(AvroUtil.CodecType.BZIP2.name().toLowerCase(),
            dfs.getMetaString(DataFileConstants.CODEC).toLowerCase());
    }
}
Example #14
Source File: CommonsCompress.java From darks-codec with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void uncompress(InputStream input, OutputStream out) throws Exception {
    CompressorInputStream cin = null;
    try {
        cin = factory.createCompressorInputStream(type, input);
        byte[] buf = new byte[1024];
        int len;
        while ((len = cin.read(buf)) > 0) {
            out.write(buf, 0, len);
        }
        out.flush();
    } catch (CompressorException e) {
        throw new Exception(
            "Fail to decompress data by commons compress. Cause " + e.getMessage(), e);
    } finally {
        IoHelper.closeIO(cin);
    }
}
Example #15
Source File: CommonsCompress.java From darks-codec with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void compress(InputStream input, OutputStream out) throws Exception {
    CompressorOutputStream cos = null;
    try {
        cos = factory.createCompressorOutputStream(type, out);
        byte[] buf = new byte[1024];
        int len;
        while ((len = input.read(buf)) > 0) {
            cos.write(buf, 0, len);
        }
        cos.flush();
    } catch (CompressorException e) {
        throw new Exception(
            "Fail to compress data by commons compress. Cause " + e.getMessage(), e);
    } finally {
        IoHelper.closeIO(cos);
    }
}
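The compress/uncompress pair above relies on the surrounding class's factory and type fields, which the snippet does not show. The same copy-loop pattern also works standalone against CompressorStreamFactory; a minimal sketch that bzip2-compresses one file into another (the file names are hypothetical):

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorOutputStream;
import org.apache.commons.compress.compressors.CompressorStreamFactory;

public class Bzip2FileCompressor {
    public static void main(String[] args) throws IOException, CompressorException {
        // Hypothetical input/output paths, for illustration only.
        try (InputStream in = new BufferedInputStream(Files.newInputStream(Paths.get("input.txt")));
             OutputStream fileOut = new BufferedOutputStream(Files.newOutputStream(Paths.get("input.txt.bz2")));
             CompressorOutputStream out = new CompressorStreamFactory()
                 .createCompressorOutputStream(CompressorStreamFactory.BZIP2, fileOut)) {
            byte[] buf = new byte[8192];
            int len;
            while ((len = in.read(buf)) != -1) {
                out.write(buf, 0, len);
            }
        }
    }
}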
Example #16
Source File: Util.java From WikipediaEntities with GNU Affero General Public License v3.0
/**
 * Open a file, choosing a decompressor if necessary.
 *
 * @param fname Filename to open
 * @return Input stream
 * @throws FileNotFoundException When the file does not exist
 */
public static InputStream openInput(String fname) throws FileNotFoundException {
    InputStream fin = new BufferedInputStream(new FileInputStream(fname));
    try {
        return new CompressorStreamFactory(true).createCompressorInputStream(fin);
    } catch (CompressorException e) {
        // Not a recognized compression format - fall back to the plain stream.
        // Returning the buffered stream (rather than the raw FileInputStream)
        // keeps the signature bytes that detection has already buffered.
        return fin;
    }
}
Example #17
Source File: CompressionDataParser.java From datacollector with Apache License 2.0
public CompressorInput(InputStream inputStream) throws IOException {
    try {
        this.inputStream = new CompressorStreamFactory(DECOMPRESS_UNTIL_EOF).createCompressorInputStream(
            new BufferedInputStream(inputStream));
    } catch (CompressorException e) {
        throw new IOException(e);
    }
}
Example #18
Source File: DecompressingContentReader.java From alfresco-simple-content-stores with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public synchronized ReadableByteChannel getReadableChannel() throws ContentIOException {
    this.ensureDelegate();

    final String mimetype = this.getMimetype();
    LOGGER.debug("Determined mimetype {} as provided via setter / content data - mimetypes to compress are {}",
        mimetype, this.mimetypesToCompress);

    final boolean shouldCompress = this.mimetypesToCompress == null || this.mimetypesToCompress.isEmpty()
        || (mimetype != null && (this.mimetypesToCompress.contains(mimetype)
            || this.isMimetypeToCompressWildcardMatch(mimetype)));

    ReadableByteChannel channel;
    if (shouldCompress) {
        LOGGER.debug("Content will be decompressed from backing store (url={})", this.getContentUrl());
        final String compressionType = this.compressionType != null && !this.compressionType.trim().isEmpty()
            ? this.compressionType
            : CompressorStreamFactory.GZIP;
        try {
            final CompressorInputStream is = COMPRESSOR_STREAM_FACTORY.createCompressorInputStream(compressionType,
                this.delegate.getContentInputStream());
            channel = Channels.newChannel(is);
        } catch (final CompressorException e) {
            LOGGER.error("Failed to open decompressing channel", e);
            throw new ContentIOException("Failed to open channel: " + this, e);
        }
    } else {
        LOGGER.debug("Content will not be decompressed from backing store (url={})", this.getContentUrl());
        channel = super.getReadableChannel();
    }
    return channel;
}
Example #19
Source File: StreamUtils.java From lucene-solr with Apache License 2.0
private InputStream inputStream(InputStream in) throws IOException {
    try {
        return csfType == null ? in : new CompressorStreamFactory().createCompressorInputStream(csfType, in);
    } catch (CompressorException e) {
        throw new IOException(e.getMessage(), e);
    }
}
Example #20
Source File: HalyardSummary.java From Halyard with Apache License 2.0
private void setupOutput() throws IOException {
    String targetUrl = conf.get(TARGET);
    if (splitOutput || out == null) {
        if (out != null) {
            writer.endRDF();
            out.close();
        }
        targetUrl = MessageFormat.format(targetUrl, outputCounter++);
        fsOut = FileSystem.get(URI.create(targetUrl), conf).create(new Path(targetUrl));
        out = fsOut;
        try {
            if (targetUrl.endsWith(".bz2")) {
                out = new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.BZIP2, out);
                targetUrl = targetUrl.substring(0, targetUrl.length() - 4);
            } else if (targetUrl.endsWith(".gz")) {
                out = new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.GZIP, out);
                targetUrl = targetUrl.substring(0, targetUrl.length() - 3);
            }
        } catch (CompressorException ce) {
            throw new IOException(ce);
        }
        Optional<RDFFormat> form = Rio.getWriterFormatForFileName(targetUrl);
        if (!form.isPresent()) {
            throw new IOException("Unsupported target file format extension: " + targetUrl);
        }
        writer = Rio.createWriter(form.get(), out);
        writer.handleNamespace("", NAMESPACE);
        writer.handleNamespace(XMLSchema.PREFIX, XMLSchema.NAMESPACE);
        writer.handleNamespace(RDF.PREFIX, RDF.NAMESPACE);
        writer.handleNamespace(RDFS.PREFIX, RDFS.NAMESPACE);
        try (CloseableIteration<? extends Namespace, SailException> iter = sail.getNamespaces()) {
            while (iter.hasNext()) {
                Namespace ns = iter.next();
                writer.handleNamespace(ns.getPrefix(), ns.getName());
            }
        }
        writer.startRDF();
    }
}
Example #21
Source File: CommonsCompressAction.java From logging-log4j2 with Apache License 2.0
/**
 * Compresses a file.
 *
 * @param name the compressor name, e.g. "gz", "bzip2", "xz", "pack200", or "deflate".
 * @param source file to compress, may not be null.
 * @param destination compressed file, may not be null.
 * @param deleteSource if true, attempt to delete file on completion. Failure to delete does not cause an exception
 *        to be thrown or affect the return value.
 *
 * @return true if source file compressed.
 * @throws IOException on IO exception.
 */
public static boolean execute(final String name, final File source, final File destination,
        final boolean deleteSource) throws IOException {
    if (!source.exists()) {
        return false;
    }
    LOGGER.debug("Starting {} compression of {}", name, source.getPath());
    try (final FileInputStream input = new FileInputStream(source);
         final BufferedOutputStream output = new BufferedOutputStream(
             new CompressorStreamFactory().createCompressorOutputStream(name,
                 new FileOutputStream(destination)))) {
        IOUtils.copy(input, output, BUF_SIZE);
        LOGGER.debug("Finished {} compression of {}", name, source.getPath());
    } catch (final CompressorException e) {
        throw new IOException(e);
    }

    if (deleteSource) {
        try {
            if (Files.deleteIfExists(source.toPath())) {
                LOGGER.debug("Deleted {}", source.toString());
            } else {
                LOGGER.warn("Unable to delete {} after {} compression. File did not exist",
                    source.toString(), name);
            }
        } catch (final Exception ex) {
            LOGGER.warn("Unable to delete {} after {} compression, {}", source.toString(), name,
                ex.getMessage());
        }
    }
    return true;
}
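A hypothetical invocation of the static method above, with illustrative file names:

import java.io.File;
import java.io.IOException;

import org.apache.logging.log4j.core.appender.rolling.action.CommonsCompressAction;

public class CompressLogExample {
    public static void main(String[] args) throws IOException {
        // Gzip app.log into app.log.gz and delete the original on success;
        // both file names are made up for this example.
        boolean compressed = CommonsCompressAction.execute(
                "gz", new File("app.log"), new File("app.log.gz"), true);
        System.out.println("compressed: " + compressed);
    }
}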
Example #22
Source File: InputFile.java From kafka-connect-spooldir with Apache License 2.0
public InputStream openStream(boolean buffered) throws IOException {
    if (null != this.inputStream) {
        throw new IOException(
            String.format("File %s is already open", this.inputFile)
        );
    }

    final String extension = Files.getFileExtension(inputFile.getName());
    log.trace("openStream() - fileName = '{}' extension = '{}'", inputFile, extension);
    this.inputStream = new FileInputStream(this.inputFile);

    if (buffered) {
        log.trace(
            "openStream() - Wrapping '{}' in a BufferedInputStream with bufferSize = {}",
            this.inputFile,
            this.bufferSize
        );
        this.inputStream = new BufferedInputStream(this.inputStream, this.bufferSize);
    }

    if (SUPPORTED_COMPRESSION_TYPES.containsKey(extension)) {
        final String compressor = SUPPORTED_COMPRESSION_TYPES.get(extension);
        log.info("Decompressing {} as {}", inputFile, compressor);
        final CompressorStreamFactory compressorStreamFactory = new CompressorStreamFactory();
        try {
            this.inputStream = compressorStreamFactory.createCompressorInputStream(compressor, this.inputStream);
        } catch (CompressorException e) {
            throw new IOException("Exception thrown while creating compressor stream " + compressor, e);
        }
    }

    log.info("Creating processing flag {}", this.processingFlag);
    Files.touch(this.processingFlag);
    return inputStream;
}
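The method depends on a SUPPORTED_COMPRESSION_TYPES map defined elsewhere in the class. A plausible reconstruction, mapping file extensions to CompressorStreamFactory constants, might look like the sketch below; the actual entries in kafka-connect-spooldir may differ:

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.compress.compressors.CompressorStreamFactory;

// Hypothetical reconstruction of the extension-to-compressor lookup table.
final class CompressionTypes {
    static final Map<String, String> SUPPORTED_COMPRESSION_TYPES = new HashMap<>();

    static {
        SUPPORTED_COMPRESSION_TYPES.put("gz", CompressorStreamFactory.GZIP);
        SUPPORTED_COMPRESSION_TYPES.put("bz2", CompressorStreamFactory.BZIP2);
        SUPPORTED_COMPRESSION_TYPES.put("xz", CompressorStreamFactory.XZ);
        SUPPORTED_COMPRESSION_TYPES.put("z", CompressorStreamFactory.Z);
    }

    private CompressionTypes() {
    }
}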
Example #23
Source File: WebDriverHandlerImpl.java From IridiumApplicationTesting with MIT License
private String extractZipDriver(
        @NotNull final String driver,
        @NotNull final String name,
        @NotNull final List<File> tempFiles) throws IOException, CompressorException {
    checkNotNull(driver);
    checkArgument(StringUtils.isNotBlank(name));

    final InputStream driverURL = getClass().getResourceAsStream(driver);

    /* The driver may not be bundled */
    if (driverURL == null) {
        throw new DriverException("The driver " + driver + " resource does not exist.");
    }

    final CompressorInputStream input = new CompressorStreamFactory()
        .createCompressorInputStream(CompressorStreamFactory.GZIP, driverURL);
    final TarArchiveInputStream tarInput = new TarArchiveInputStream(input);

    /*
      Sometimes tar files contain a "." directory, which we want to ignore.
      So loop until we get a file that isn't in a directory.
    */
    TarArchiveEntry tarArchiveEntry = tarInput.getNextTarEntry();
    while (tarArchiveEntry.getName().contains("/")) {
        tarArchiveEntry = tarInput.getNextTarEntry();
    }

    return copyDriver(tarInput, name, tempFiles);
}
Example #24
Source File: RDescriptionUtils.java From nexus-repository-r with Eclipse Public License 1.0
private static Map<String, String> extractMetadataFromTgz(final InputStream is) {
    checkNotNull(is);
    try {
        final CompressorStreamFactory compressorStreamFactory = new CompressorStreamFactory();
        try (InputStream cis = compressorStreamFactory.createCompressorInputStream(GZIP, is)) {
            return extractMetadataFromArchive(TAR, cis);
        }
    } catch (CompressorException | IOException e) {
        throw new RException(null, e);
    }
}
Example #25
Source File: RPackagesUtils.java From nexus-repository-r with Eclipse Public License 1.0
public static Content buildPackages(final Collection<Map<String, String>> entries) throws IOException {
    CompressorStreamFactory compressorStreamFactory = new CompressorStreamFactory();
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    try (CompressorOutputStream cos = compressorStreamFactory.createCompressorOutputStream(GZIP, os)) {
        try (OutputStreamWriter writer = new OutputStreamWriter(cos, UTF_8)) {
            for (Map<String, String> entry : entries) {
                InternetHeaders headers = new InternetHeaders();
                headers.addHeader(P_PACKAGE, entry.get(P_PACKAGE));
                headers.addHeader(P_VERSION, entry.get(P_VERSION));
                headers.addHeader(P_DEPENDS, entry.get(P_DEPENDS));
                headers.addHeader(P_IMPORTS, entry.get(P_IMPORTS));
                headers.addHeader(P_SUGGESTS, entry.get(P_SUGGESTS));
                headers.addHeader(P_LINKINGTO, entry.get(P_LINKINGTO));
                headers.addHeader(P_LICENSE, entry.get(P_LICENSE));
                headers.addHeader(P_NEEDS_COMPILATION, entry.get(P_NEEDS_COMPILATION));
                Enumeration<String> headerLines = headers.getAllHeaderLines();
                while (headerLines.hasMoreElements()) {
                    String line = headerLines.nextElement();
                    writer.write(line, 0, line.length());
                    writer.write('\n');
                }
                writer.write('\n');
            }
        }
    } catch (CompressorException e) {
        throw new RException(null, e);
    }
    return new Content(new BytesPayload(os.toByteArray(), "application/x-gzip"));
}
Example #26
Source File: OMDBCheckpointServlet.java From hadoop-ozone with Apache License 2.0
/**
 * Write OM DB Checkpoint to an output stream as a compressed file (tgz).
 *
 * @param checkpoint checkpoint file
 * @param destination destination output stream.
 * @throws IOException
 */
public static void writeOmDBCheckpointToStream(DBCheckpoint checkpoint,
        OutputStream destination) throws IOException {
    try (CompressorOutputStream gzippedOut = new CompressorStreamFactory()
            .createCompressorOutputStream(CompressorStreamFactory.GZIP, destination)) {
        try (ArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(gzippedOut)) {
            Path checkpointPath = checkpoint.getCheckpointLocation();
            try (Stream<Path> files = Files.list(checkpointPath)) {
                for (Path path : files.collect(Collectors.toList())) {
                    if (path != null) {
                        Path fileName = path.getFileName();
                        if (fileName != null) {
                            includeFile(path.toFile(), fileName.toString(), archiveOutputStream);
                        }
                    }
                }
            }
        }
    } catch (CompressorException e) {
        throw new IOException(
            "Can't compress the checkpoint: " + checkpoint.getCheckpointLocation(), e);
    }
}
Example #27
Source File: PackagesGroupHandler.java From nexus-repository-r with Eclipse Public License 1.0
protected List<Map<String, String>> parseResponse(@Nonnull final Response response) {
    Payload payload = checkNotNull(response.getPayload());
    try (InputStream in = payload.openInputStream()) {
        final CompressorStreamFactory compressorStreamFactory = new CompressorStreamFactory();
        try (InputStream cin = compressorStreamFactory.createCompressorInputStream(GZIP, in)) {
            return RPackagesUtils.parseMetadata(cin);
        }
    } catch (IOException | CompressorException e) {
        throw new RException(null, e);
    }
}
Example #28
Source File: IOUtil.java From hugegraph-loader with Apache License 2.0
public static void compress(OutputStream stream, Charset charset,
        Compression compression, String... lines)
        throws IOException, CompressorException {
    BufferedOutputStream bos = new BufferedOutputStream(stream);
    CompressorOutputStream cos = FACTORY.createCompressorOutputStream(
        compression.string(), bos);
    for (String line : lines) {
        cos.write(line.getBytes(charset));
        cos.write("\n".getBytes(charset));
    }
    cos.flush();
    cos.close();
}
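A hypothetical caller of the method above; Compression is hugegraph-loader's own enum, so this sketch assumes a GZIP member whose string() value is a Commons Compress compressor name:

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.compressors.CompressorException;

public class IOUtilCompressExample {
    public static void main(String[] args) throws IOException, CompressorException {
        // Hypothetical output file and assumed Compression.GZIP member.
        try (OutputStream out = Files.newOutputStream(Paths.get("lines.gz"))) {
            IOUtil.compress(out, StandardCharsets.UTF_8, Compression.GZIP,
                    "first line", "second line");
        }
    }
}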
Example #29
Source File: KnowledgeBaseServiceImpl.java From inception with Apache License 2.0
@SuppressWarnings("resource") @Override public void importData(KnowledgeBase kb, String aFilename, InputStream aIS) throws RDFParseException, RepositoryException, IOException { if (kb.isReadOnly()) { log.warn("Knowledge base [{}] is read only, will not import!", kb.getName()); return; } InputStream is = new BufferedInputStream(aIS); try { // Stream is expected to be closed by caller of importData is = new CompressorStreamFactory().createCompressorInputStream(is); } catch (CompressorException e) { // Probably not compressed then or unknown format - just try as is. log.debug("Stream is not compressed, continue as is."); } // Detect the file format RDFFormat format = Rio.getParserFormatForFileName(aFilename).orElse(RDFFormat.RDFXML); // Load files into the repository try (RepositoryConnection conn = getConnection(kb)) { // If the RDF file contains relative URLs, then they probably start with a hash. // To avoid having two hashes here, we drop the hash from the base prefix configured // by the user. String prefix = StringUtils.removeEnd(kb.getBasePrefix(), "#"); conn.add(is, prefix, format); } }
Example #30
Source File: RollingAppenderSizeTest.java From logging-log4j2 with Apache License 2.0
@Test
public void testAppender() throws Exception {
    final Path path = Paths.get(DIR, "rollingtest.log");
    if (Files.exists(path) && createOnDemand) {
        Assert.fail(String.format("Unexpected file: %s (%s bytes)", path, Files.getAttribute(path, "size")));
    }
    for (int i = 0; i < 500; ++i) {
        logger.debug("This is test message number " + i);
    }
    try {
        Thread.sleep(100);
    } catch (final InterruptedException ie) {
        // Ignore the error.
    }

    final File dir = new File(DIR);
    assertTrue("Directory not created", dir.exists() && dir.listFiles().length > 0);
    final File[] files = dir.listFiles();
    assertNotNull(files);
    assertThat(files, hasItemInArray(that(hasName(that(endsWith(fileExtension))))));

    final FileExtension ext = FileExtension.lookup(fileExtension);
    if (ext == null || FileExtension.ZIP == ext || FileExtension.PACK200 == ext) {
        return; // Apache Commons Compress cannot deflate zip? TODO test decompressing these formats
    }

    // Stop the context to make sure all files are compressed and closed. Trying to remedy failures in CI builds.
    if (!loggerContextRule.getLoggerContext().stop(30, TimeUnit.SECONDS)) {
        System.err.println("Could not stop cleanly " + loggerContextRule + " for " + this);
    }

    for (final File file : files) {
        if (file.getName().endsWith(fileExtension)) {
            CompressorInputStream in = null;
            try (FileInputStream fis = new FileInputStream(file)) {
                try {
                    in = new CompressorStreamFactory().createCompressorInputStream(ext.name().toLowerCase(), fis);
                } catch (final CompressorException ce) {
                    ce.printStackTrace();
                    fail("Error creating input stream from " + file.toString() + ": " + ce.getMessage());
                }
                final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                assertNotNull("No input stream for " + file.getName(), in);
                try {
                    IOUtils.copy(in, baos);
                } catch (final Exception ex) {
                    ex.printStackTrace();
                    fail("Unable to decompress " + file.getAbsolutePath());
                }
                final String text = new String(baos.toByteArray(), Charset.defaultCharset());
                final String[] lines = text.split("[\\r\\n]+");
                for (final String line : lines) {
                    assertTrue(line.contains(
                        "DEBUG o.a.l.l.c.a.r.RollingAppenderSizeTest [main] This is test message number"));
                }
            } finally {
                Closer.close(in);
            }
        }
    }
}