org.apache.commons.compress.archivers.ar.ArArchiveEntry Java Examples

The following examples show how to use org.apache.commons.compress.archivers.ar.ArArchiveEntry. Vote up the examples you find useful, or vote down those you don't; links above each example lead to the original project and source file. Related API usage is listed on the sidebar.
Example #1
Source File: DebianPackageWriter.java    From neoscada with Eclipse Public License 1.0 6 votes vote down vote up
/**
 * Creates a writer that emits a Debian binary package to {@code stream}.
 *
 * <p>Validates the control file, opens the outer "ar" container, writes the
 * "debian-binary" member, and stages the package data in a gzip-compressed
 * tar temp file for later inclusion.</p>
 *
 * @param stream the output stream that receives the finished archive
 * @param packageControlFile the package metadata, validated via {@code BinaryPackageControlFile.validate}
 * @param timestampProvider supplies the modification time used for archive entries; must not be null
 * @throws IOException if the archive header or temp file cannot be written
 * @throws IllegalArgumentException if {@code timestampProvider} is null
 */
public DebianPackageWriter ( final OutputStream stream, final GenericControlFile packageControlFile, final TimestampProvider timestampProvider ) throws IOException
{
    this.packageControlFile = packageControlFile;
    this.timestampProvider = timestampProvider;
    // Fail fast before any bytes are written to the stream
    if ( getTimestampProvider () == null )
    {
        throw new IllegalArgumentException ( "'timestampProvider' must not be null" );
    }
    BinaryPackageControlFile.validate ( packageControlFile );

    this.ar = new ArArchiveOutputStream ( stream );

    // NOTE(review): "debian-binary" is written as the first ar member (the .deb
    // format presumably requires this ordering — confirm against the Debian spec).
    // Division by 1000 converts the provider's millisecond timestamp to the
    // seconds that the ar entry constructor expects.
    this.ar.putArchiveEntry ( new ArArchiveEntry ( "debian-binary", this.binaryHeader.length, 0, 0, AR_ARCHIVE_DEFAULT_MODE, getTimestampProvider ().getModTime () / 1000 ) );
    this.ar.write ( this.binaryHeader );
    this.ar.closeArchiveEntry ();

    this.dataTemp = File.createTempFile ( "data", null );

    // Data entries are staged in a temp file as tar.gz; GNU long-file mode
    // allows entry names longer than the classic 100-character tar limit.
    this.dataStream = new TarArchiveOutputStream ( new GZIPOutputStream ( new FileOutputStream ( this.dataTemp ) ) );
    this.dataStream.setLongFileMode ( TarArchiveOutputStream.LONGFILE_GNU );
}
 
Example #2
Source File: DebianPackageWriter.java    From packagedrone with Eclipse Public License 1.0 6 votes vote down vote up
/**
 * Creates a writer that emits a Debian binary package to {@code stream}.
 *
 * <p>Validates the control file, opens the outer "ar" container, writes the
 * "debian-binary" member, and stages the package data in a gzip-compressed
 * tar temp file for later inclusion.</p>
 *
 * @param stream the output stream that receives the finished archive
 * @param packageControlFile the package metadata, validated via {@code BinaryPackageControlFile.validate}
 * @param timestampSupplier supplies the modification instant used for archive entries; must not be null
 * @throws IOException if the archive header or temp file cannot be written
 * @throws NullPointerException if {@code timestampSupplier} is null
 */
public DebianPackageWriter ( final OutputStream stream, final BinaryPackageControlFile packageControlFile, final Supplier<Instant> timestampSupplier ) throws IOException
{
    Objects.requireNonNull ( timestampSupplier );

    this.timestampSupplier = timestampSupplier;
    this.packageControlFile = packageControlFile;
    BinaryPackageControlFile.validate ( packageControlFile );

    this.ar = new ArArchiveOutputStream ( stream );

    // NOTE(review): "debian-binary" is written as the first ar member (the .deb
    // format presumably requires this ordering — confirm against the Debian spec).
    // getEpochSecond() already yields the seconds value the ar entry expects.
    this.ar.putArchiveEntry ( new ArArchiveEntry ( "debian-binary", this.binaryHeader.length, 0, 0, AR_ARCHIVE_DEFAULT_MODE, timestampSupplier.get ().getEpochSecond () ) );
    this.ar.write ( this.binaryHeader );
    this.ar.closeArchiveEntry ();

    this.dataTemp = File.createTempFile ( "data", null );

    // Data entries are staged in a temp file as tar.gz; GNU long-file mode
    // allows entry names longer than the classic 100-character tar limit.
    this.dataStream = new TarArchiveOutputStream ( new GZIPOutputStream ( new FileOutputStream ( this.dataTemp ) ) );
    this.dataStream.setLongFileMode ( TarArchiveOutputStream.LONGFILE_GNU );
}
 
Example #3
Source File: AttributeAccessor.java    From jarchivelib with Apache License 2.0 6 votes vote down vote up
/**
 * Picks an {@link AttributeAccessor} implementation matching the concrete
 * runtime type of the given archive entry.
 *
 * @param entry the archive entry to adapt
 * @return an accessor specific to the entry's type, or a fallback accessor
 *         when the type is not recognized
 */
public static AttributeAccessor<?> create(ArchiveEntry entry) {
    if (entry instanceof TarArchiveEntry) {
        return new TarAttributeAccessor((TarArchiveEntry) entry);
    }
    if (entry instanceof ZipArchiveEntry) {
        return new ZipAttributeAccessor((ZipArchiveEntry) entry);
    }
    if (entry instanceof CpioArchiveEntry) {
        return new CpioAttributeAccessor((CpioArchiveEntry) entry);
    }
    if (entry instanceof ArjArchiveEntry) {
        return new ArjAttributeAccessor((ArjArchiveEntry) entry);
    }
    if (entry instanceof ArArchiveEntry) {
        return new ArAttributeAccessor((ArArchiveEntry) entry);
    }
    return new FallbackAttributeAccessor(entry);
}
 
Example #4
Source File: DebianPackageWriter.java    From neoscada with Eclipse Public License 1.0 5 votes vote down vote up
/**
 * Adds the contents of {@code file} to the outer ar archive under the given
 * entry name, using uid/gid 0 and the provider's timestamp (converted from
 * milliseconds to seconds).
 *
 * @param file the file whose bytes become the entry content
 * @param entryName the name of the entry inside the archive
 * @throws IOException if the entry cannot be written
 */
private void addArFile ( final File file, final String entryName ) throws IOException
{
    final ArArchiveEntry entry = new ArArchiveEntry ( entryName, file.length (), 0, 0, AR_ARCHIVE_DEFAULT_MODE, timestampProvider.getModTime () / 1000 );
    this.ar.putArchiveEntry ( entry );

    // try-with-resources: the previous version never closed the FileInputStream,
    // leaking a file handle per added file.
    try ( FileInputStream in = new FileInputStream ( file ) )
    {
        ByteStreams.copy ( in, this.ar );
    }

    this.ar.closeArchiveEntry ();
}
 
Example #5
Source File: Ar.java    From gradle-plugins with MIT License 5 votes vote down vote up
/**
 * Writes a single regular file from the copy spec into the ar archive.
 * Directories are skipped because the ar format has no directory entries.
 *
 * @param details the file being copied, including its path, size and mtime
 */
@Override
@SneakyThrows
public void processFile(FileCopyDetailsInternal details) {
    if (details.isDirectory()) {
        return;
    }

    ArArchiveEntry archiveEntry = new ArArchiveEntry(
            details.getPath(),
            details.getSize(),
            0, // uid: root
            0, // gid: root
            0100644, // regular file, rw-r--r-- (same value as the former magic number 33188)
            details.getLastModified() / 1000 // ar stores mtime in seconds; Gradle supplies millis
    );

    try {
        outputFile.putArchiveEntry(archiveEntry);
    } catch (IOException e) {
        // Log the full exception (the previous code logged only the message,
        // losing the stack trace), then rethrow for the caller to handle.
        log.error("Failed to put archive entry for {}", details.getPath(), e);
        throw e;
    }

    details.copyTo(outputFile);

    outputFile.closeArchiveEntry();
}
 
Example #6
Source File: ArArchiveInputStream.java    From nexus-public with Eclipse Public License 1.0 5 votes vote down vote up
/**
 * Reads the GNU archive String Table.
 *
 * <p>The string table holds the long file names referenced by other entries.
 * Its content is stored into {@code namebuffer} for later lookups, and a
 * synthetic entry is returned for it.</p>
 *
 * @param length raw header bytes containing the record length field
 * @param offset start of the length field within {@code length}
 * @param len width of the length field in bytes
 * @return a synthetic entry representing the string table record
 * @throws IOException if the table cannot be read in full
 * @see #isGNUStringTable
 */
private ArArchiveEntry readGNUStringTable(final byte[] length, final int offset, final int len) throws IOException {
  final int bufflen = asInt(length, offset, len); // Assume length will fit in an int
  namebuffer = new byte[bufflen];
  final int read = IOUtils.readFully(input, namebuffer, 0, bufflen);
  trackReadBytes(read);
  if (read != bufflen){
    // "//" below is the literal name of the GNU string table entry, not a typo
    throw new IOException("Failed to read complete // record: expected="
        + bufflen + " read=" + read);
  }
  return new ArArchiveEntry(GNU_STRING_TABLE_NAME, bufflen);
}
 
Example #7
Source File: Ar.java    From gradle-plugins with MIT License 5 votes vote down vote up
/**
 * Writes a single regular file from the copy spec into the ar archive.
 * Directories are skipped because the ar format has no directory entries.
 *
 * @param details the file being copied, including its path, size and mtime
 */
@Override
@SneakyThrows
public void processFile(FileCopyDetailsInternal details) {
    if (details.isDirectory()) {
        return;
    }

    ArArchiveEntry archiveEntry = new ArArchiveEntry(
            details.getPath(),
            details.getSize(),
            0, // uid: root
            0, // gid: root
            0100644, // regular file, rw-r--r-- (same value as the former magic number 33188)
            details.getLastModified() / 1000 // ar stores mtime in seconds; Gradle supplies millis
    );

    try {
        outputFile.putArchiveEntry(archiveEntry);
    } catch (IOException e) {
        // Log the full exception (the previous code logged only the message,
        // losing the stack trace), then rethrow for the caller to handle.
        log.error("Failed to put archive entry for {}", details.getPath(), e);
        throw e;
    }

    details.copyTo(outputFile);

    outputFile.closeArchiveEntry();
}
 
Example #8
Source File: DebianPackageWriter.java    From packagedrone with Eclipse Public License 1.0 5 votes vote down vote up
/**
 * Adds the contents of {@code file} to the outer ar archive under the given
 * entry name, using uid/gid 0 and the supplied timestamp (epoch seconds).
 *
 * @param file the file whose bytes become the entry content
 * @param entryName the name of the entry inside the archive
 * @param timestampSupplier supplies the entry's modification instant
 * @throws IOException if the entry cannot be written
 */
private void addArFile ( final File file, final String entryName, final Supplier<Instant> timestampSupplier ) throws IOException
{
    final ArArchiveEntry entry = new ArArchiveEntry ( entryName, file.length (), 0, 0, AR_ARCHIVE_DEFAULT_MODE, timestampSupplier.get ().getEpochSecond () );
    this.ar.putArchiveEntry ( entry );

    // try-with-resources: the previous version never closed the FileInputStream,
    // leaking a file handle per added file.
    try ( FileInputStream in = new FileInputStream ( file ) )
    {
        IOUtils.copy ( in, this.ar );
    }

    this.ar.closeArchiveEntry ();
}
 
Example #9
Source File: CxxLibraryIntegrationTest.java    From buck with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a static library with {@code cxx.archive_contents=thin} and asserts
 * that none of the member names stored in the resulting archive are absolute
 * paths. Skipped when the resolved archiver does not support thin archives.
 */
@Test
public void thinArchivesDoNotContainAbsolutePaths() throws IOException {
  CxxPlatform cxxPlatform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  // Thin archives are an optional archiver feature; skip the test otherwise.
  assumeTrue(
      cxxPlatform
          .getAr()
          .resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE)
          .supportsThinArchives());
  ProjectWorkspace workspace =
      TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_library", tmp);
  workspace.setUp();
  Path archive =
      workspace.buildAndReturnOutput("-c", "cxx.archive_contents=thin", "//:foo#default,static");

  // NOTE: Replace the thin header with a normal header just so the commons compress parser
  // can parse the archive contents.
  try (OutputStream outputStream =
      Files.newOutputStream(workspace.getPath(archive), StandardOpenOption.WRITE)) {
    outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
  }

  // Now iterate the archive and verify it contains no absolute paths.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(workspace.getPath(archive).toFile()))) {
    ArArchiveEntry entry;
    while ((entry = stream.getNextArEntry()) != null) {
      if (!entry.getName().isEmpty()) {
        assertFalse(
            "found absolute path: " + entry.getName(),
            workspace.getDestPath().getFileSystem().getPath(entry.getName()).isAbsolute());
      }
    }
  }
}
 
Example #10
Source File: ArchiveStepIntegrationTest.java    From buck with Apache License 2.0 4 votes vote down vote up
/**
 * Runs the archive step followed by the scrubber step and asserts the
 * resulting archive is deterministic: the entry's timestamp is the common
 * fixed value, uid/gid are zeroed, and the mode is 0100644. Also checks the
 * step description starts with the archiver command. macOS/Linux only.
 */
@Test
@SuppressWarnings("PMD.AvoidUsingOctalValues")
public void thatGeneratedArchivesAreDeterministic() throws IOException, InterruptedException {
  assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  Archiver archiver =
      platform.getAr().resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE);
  Path output = filesystem.getPath("output.a");
  Path input = filesystem.getPath("input.dat");
  filesystem.writeContentsToPath("blah", input);
  // The executable bit exercises mode scrubbing below.
  Preconditions.checkState(filesystem.resolve(input).toFile().setExecutable(true));
  ImmutableList<String> archiverCmd =
      archiver.getCommandPrefix(ruleResolver.getSourcePathResolver());

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(ruleResolver.getSourcePathResolver()),
          archiverCmd,
          ImmutableList.of(),
          getArchiveOptions(false),
          output,
          ImmutableList.of(input),
          archiver,
          filesystem.getPath("scratchDir"));
  FileScrubberStep fileScrubberStep =
      new FileScrubberStep(filesystem, output, archiver.getScrubbers());

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);
  exitCode = fileScrubberStep.execute(executionContext).getExitCode();
  assertEquals("archive scrub step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Now read the archive entries and verify that the timestamp, UID, and GID fields are
  // zero'd out.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    ArArchiveEntry entry = stream.getNextArEntry();
    assertEquals(
        ObjectFileCommonModificationDate.COMMON_MODIFICATION_TIME_STAMP, entry.getLastModified());
    assertEquals(0, entry.getUserId());
    assertEquals(0, entry.getGroupId());
    // Message renders the mode in octal on failure for easier diagnosis.
    assertEquals(String.format("0%o", entry.getMode()), 0100644, entry.getMode());
  }

  // test the beginning of description to make sure it matches the archive command
  String desc = archiveStep.getDescription(executionContext);
  assertThat(desc, Matchers.startsWith(archiverCmd.get(0)));
}
 
Example #11
Source File: ArchiveStepIntegrationTest.java    From buck with Apache License 2.0 4 votes vote down vote up
/**
 * Passes a directory (rather than a file) as the archive step's input and
 * asserts the resulting archive contains the directory's file under its
 * base name. macOS/Linux only.
 */
@Test
public void inputDirs() throws IOException, InterruptedException {
  assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  Archiver archiver =
      platform.getAr().resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE);
  Path output = filesystem.getPath("output.a");
  Path input = filesystem.getPath("foo/blah.dat");
  filesystem.mkdirs(input.getParent());
  filesystem.writeContentsToPath("blah", input);

  // Build an archive step.
  // Note: the input is the PARENT directory, not the file itself.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(ruleResolver.getSourcePathResolver()),
          archiver.getCommandPrefix(ruleResolver.getSourcePathResolver()),
          ImmutableList.of(),
          getArchiveOptions(false),
          output,
          ImmutableList.of(input.getParent()),
          archiver,
          filesystem.getPath("scratchDir"));

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Now read the archive entries and verify that the timestamp, UID, and GID fields are
  // zero'd out.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    ArArchiveEntry entry = stream.getNextArEntry();
    assertThat(entry.getName(), Matchers.equalTo("blah.dat"));
  }
}
 
Example #12
Source File: ArchiveStepIntegrationTest.java    From buck with Apache License 2.0 4 votes vote down vote up
/**
 * Builds a thin archive from a deliberately large input and asserts that the
 * output carries the thin-archive header, stays small (contains references
 * rather than content), and stores member names relative to the archive's
 * parent directory. Skipped when the archiver lacks thin-archive support.
 * macOS/Linux only.
 */
@Test
public void thinArchives() throws IOException, InterruptedException {
  assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
  ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
  CxxPlatform platform =
      CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

  // Build up the paths to various files the archive step will use.
  BuildRuleResolver ruleResolver = new TestActionGraphBuilder();
  Archiver archiver =
      platform.getAr().resolve(ruleResolver, UnconfiguredTargetConfiguration.INSTANCE);

  assumeTrue(archiver.supportsThinArchives());

  Path output = filesystem.getPath("foo/libthin.a");
  filesystem.mkdirs(output.getParent());

  // Create a really large input file so it's obvious that the archive is thin.
  Path input = filesystem.getPath("bar/blah.dat");
  filesystem.mkdirs(input.getParent());
  byte[] largeInputFile = new byte[1024 * 1024];
  byte[] fillerToRepeat = "hello\n".getBytes(StandardCharsets.UTF_8);
  for (int i = 0; i < largeInputFile.length; i++) {
    largeInputFile[i] = fillerToRepeat[i % fillerToRepeat.length];
  }
  filesystem.writeBytesToPath(largeInputFile, input);

  // Build an archive step.
  ArchiveStep archiveStep =
      new ArchiveStep(
          filesystem,
          archiver.getEnvironment(ruleResolver.getSourcePathResolver()),
          archiver.getCommandPrefix(ruleResolver.getSourcePathResolver()),
          ImmutableList.of(),
          getArchiveOptions(true),
          output,
          ImmutableList.of(input),
          archiver,
          filesystem.getPath("scratchDir"));

  // Execute the archive step and verify it ran successfully.
  ExecutionContext executionContext = TestExecutionContext.newInstanceWithRealProcessExecutor();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = archiveStep.execute(executionContext).getExitCode();
  assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

  // Verify that the thin header is present.
  assertThat(filesystem.readFirstLine(output), Matchers.equalTo(Optional.of("!<thin>")));

  // Verify that even though the archived contents is really big, the archive is still small.
  assertThat(filesystem.getFileSize(output), Matchers.lessThan(1000L));

  // NOTE: Replace the thin header with a normal header just so the commons compress parser
  // can parse the archive contents.
  try (OutputStream outputStream =
      Files.newOutputStream(filesystem.resolve(output), StandardOpenOption.WRITE)) {
    outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
  }

  // Now read the archive entries and verify that the timestamp, UID, and GID fields are
  // zero'd out.
  try (ArArchiveInputStream stream =
      new ArArchiveInputStream(new FileInputStream(filesystem.resolve(output).toFile()))) {
    ArArchiveEntry entry = stream.getNextArEntry();

    // Verify that the input names are relative paths from the outputs parent dir.
    assertThat(
        entry.getName(), Matchers.equalTo(output.getParent().relativize(input).toString()));
  }
}
 
Example #13
Source File: AttributeAccessor.java    From jarchivelib with Apache License 2.0 4 votes vote down vote up
/**
 * Creates an accessor that exposes the attributes of an ar archive entry.
 *
 * @param entry the ar entry to wrap
 */
public ArAttributeAccessor(ArArchiveEntry entry) {
    super(entry);
}