org.apache.hadoop.fs.Options.CreateOpts Java Examples

The following examples show how to use org.apache.hadoop.fs.Options.CreateOpts. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: HdfsBlobContainer.java    From crate with Apache License 2.0 6 votes vote down vote up
/**
 * Writes the full contents of {@code inputStream} to a new blob named
 * {@code blobName} under this container's path. When
 * {@code failIfAlreadyExists} is set, only CREATE is passed so an existing
 * blob triggers a FileAlreadyExistsException; otherwise OVERWRITE is added.
 * Every write is followed by hsync() to force data to the disk device.
 */
@Override
public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException {
    store.execute((Operation<Void>) fileContext -> {
        final Path blobPath = new Path(path, blobName);
        // CREATE alone fails if the blob already exists; OVERWRITE replaces it.
        final EnumSet<CreateFlag> createFlags = failIfAlreadyExists
            ? EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK)
            : EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE, CreateFlag.SYNC_BLOCK);
        final CreateOpts[] createOpts = {CreateOpts.bufferSize(bufferSize)};
        try (FSDataOutputStream stream = fileContext.create(blobPath, createFlags, createOpts)) {
            final byte[] chunk = new byte[bufferSize];
            for (int n = inputStream.read(chunk); n != -1; n = inputStream.read(chunk)) {
                stream.write(chunk, 0, n);
                // SYNC_BLOCK only forces *closed* blocks to the disk device;
                // per its docs, Syncable.hsync() should be called after each
                // write if true synchronous behavior is required.
                stream.hsync();
            }
        } catch (org.apache.hadoop.fs.FileAlreadyExistsException faee) {
            throw new FileAlreadyExistsException(blobPath.toString(), null, faee.getMessage());
        }
        return null;
    });
}
 
Example #2
Source File: SequenceFile.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Construct the preferred type of SequenceFile Writer.
 * @param fs The configured filesystem.
 * @param conf The configuration.
 * @param name The name of the file.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param bufferSize buffer size for the underlying outputstream.
 * @param replication replication factor for the file.
 * @param blockSize block size for the file.
 * @param createParent create parent directory if non-existent
 * @param compressionType The compression type.
 * @param codec The compression codec.
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException if the writer cannot be constructed
 */
@Deprecated
public static Writer createWriter(FileSystem fs, Configuration conf,
    Path name, Class keyClass, Class valClass, int bufferSize,
    short replication, long blockSize, boolean createParent,
    CompressionType compressionType, CompressionCodec codec,
    Metadata metadata) throws IOException {
  // Translate the FileSystem-style arguments into CreateOpts and delegate
  // to the FileContext-based factory method.
  FileContext fileContext = FileContext.getFileContext(fs.getUri(), conf);
  CreateOpts parentOpt = createParent
      ? CreateOpts.createParent()
      : CreateOpts.donotCreateParent();
  return createWriter(fileContext, conf, name, keyClass, valClass,
      compressionType, codec, metadata,
      EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
      CreateOpts.bufferSize(bufferSize),
      parentOpt,
      CreateOpts.repFac(replication),
      CreateOpts.blockSize(blockSize));
}
 
Example #3
Source File: FileContextMainOperationsBaseTest.java    From hadoop with Apache License 2.0 6 votes vote down vote up
@Test
/*
 * Test method
 *  org.apache.hadoop.fs.FileContext.getFileContext(AbstractFileSystem)
 */
public void testGetFileContext1() throws IOException {
  final Path rootPath = getTestRootPath(fc, "test");
  AbstractFileSystem asf = fc.getDefaultFileSystem();
  // create FileContext using the protected #getFileContext(1) method:
  FileContext fc2 = FileContext.getFileContext(asf);
  // Now just check that this context can do something reasonable:
  final Path path = new Path(rootPath, "zoo");
  // try-with-resources so the stream is closed even if an assertion or
  // write path throws (the old code leaked it on failure)
  try (FSDataOutputStream out = fc2.create(path, EnumSet.of(CREATE),
      Options.CreateOpts.createParent())) {
    // creating the file is the point; nothing to write
  }
  Path pathResolved = fc2.resolvePath(path);
  // JUnit convention: expected value first, actual second
  assertEquals(path.toUri().getPath(), pathResolved.toUri().getPath());
}
 
Example #4
Source File: SymlinkBaseTest.java    From hadoop with Apache License 2.0 6 votes vote down vote up
@Test(timeout=10000)
/** Try to create a file with parent that is a dangling link */
public void testCreateFileViaDanglingLinkParent() throws IOException {
  Path danglingDir = new Path(testBaseDir1()+"/dangling");
  Path fileUnderDangling = new Path(testBaseDir1()+"/dangling/file");
  // Make "dangling" a symlink whose target does not exist.
  wrapper.createSymlink(new Path("/doesNotExist"), danglingDir, false);
  try {
    FSDataOutputStream out =
        wrapper.create(fileUnderDangling, EnumSet.of(CreateFlag.CREATE),
            CreateOpts.repFac((short) 1),
            CreateOpts.blockSize(blockSize));
    out.close();
    fail("Created a link with dangling link parent");
  } catch (FileNotFoundException e) {
    // Expected. The parent is dangling.
  }
}
 
Example #5
Source File: LoadGenerator.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/** Create a file with a length of <code>fileSize</code>.
 * The file is filled with 'a'.
 */
private void genFile(Path file, long fileSize) throws IOException {
  long startTime = Time.now();
  FSDataOutputStream out = null;
  try {
    out = fc.create(file,
        EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
        CreateOpts.createParent(), CreateOpts.bufferSize(4096),
        CreateOpts.repFac((short) 3));
    executionTime[CREATE] += (Time.now() - startTime);
    numOfOps[CREATE]++;

    long remaining = fileSize;
    while (remaining > 0) {
      // BUGFIX: bound each chunk by the bytes REMAINING, not by fileSize.
      // The old Math.min(fileSize, ...) wrote a full chunk on the final
      // iteration, producing a file larger than fileSize whenever fileSize
      // exceeded, and was not a multiple of, WRITE_CONTENTS.length.
      long s = Math.min(remaining, WRITE_CONTENTS.length);
      out.write(WRITE_CONTENTS, 0, (int) s);
      remaining -= s;
    }

    startTime = Time.now();
    // BUGFIX: close inside the timed section. Previously nothing happened
    // between the two Time.now() calls, so WRITE_CLOSE always recorded ~0
    // and the actual close happened later in the finally block.
    out.close();
    out = null;
    executionTime[WRITE_CLOSE] += (Time.now() - startTime);
    numOfOps[WRITE_CLOSE]++;
  } finally {
    // No-op when already closed above; cleans up only on the error path.
    IOUtils.cleanup(LOG, out);
  }
}
 
Example #6
Source File: SequenceFile.java    From gemfirexd-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Construct the preferred type of SequenceFile Writer.
 * @param fs The configured filesystem.
 * @param conf The configuration.
 * @param name The name of the file.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param bufferSize buffer size for the underlying outputstream.
 * @param replication replication factor for the file.
 * @param blockSize block size for the file.
 * @param createParent create parent directory if non-existent
 * @param compressionType The compression type.
 * @param codec The compression codec.
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException if the writer cannot be constructed
 */
@Deprecated
public static Writer createWriter(FileSystem fs, Configuration conf,
    Path name, Class keyClass, Class valClass, int bufferSize,
    short replication, long blockSize, boolean createParent,
    CompressionType compressionType, CompressionCodec codec,
    Metadata metadata) throws IOException {
  // Translate the FileSystem-style arguments into CreateOpts and delegate
  // to the FileContext-based factory method.
  FileContext fileContext = FileContext.getFileContext(fs.getUri(), conf);
  CreateOpts parentOpt = createParent
      ? CreateOpts.createParent()
      : CreateOpts.donotCreateParent();
  return createWriter(fileContext, conf, name, keyClass, valClass,
      compressionType, codec, metadata,
      EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
      CreateOpts.bufferSize(bufferSize),
      parentOpt,
      CreateOpts.repFac(replication),
      CreateOpts.blockSize(blockSize));
}
 
Example #7
Source File: SequenceFile.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Construct the preferred type of SequenceFile Writer.
 * @param fs The configured filesystem.
 * @param conf The configuration.
 * @param name The name of the file.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param bufferSize buffer size for the underlying outputstream.
 * @param replication replication factor for the file.
 * @param blockSize block size for the file.
 * @param createParent create parent directory if non-existent
 * @param compressionType The compression type.
 * @param codec The compression codec.
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException if the writer cannot be constructed
 */
@Deprecated
public static Writer createWriter(FileSystem fs, Configuration conf,
    Path name, Class keyClass, Class valClass, int bufferSize,
    short replication, long blockSize, boolean createParent,
    CompressionType compressionType, CompressionCodec codec,
    Metadata metadata) throws IOException {
  // Translate the FileSystem-style arguments into CreateOpts and delegate
  // to the FileContext-based factory method.
  FileContext fileContext = FileContext.getFileContext(fs.getUri(), conf);
  CreateOpts parentOpt = createParent
      ? CreateOpts.createParent()
      : CreateOpts.donotCreateParent();
  return createWriter(fileContext, conf, name, keyClass, valClass,
      compressionType, codec, metadata,
      EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
      CreateOpts.bufferSize(bufferSize),
      parentOpt,
      CreateOpts.repFac(replication),
      CreateOpts.blockSize(blockSize));
}
 
Example #8
Source File: FileContextMainOperationsBaseTest.java    From big-c with Apache License 2.0 6 votes vote down vote up
@Test
/*
 * Test method
 *  org.apache.hadoop.fs.FileContext.getFileContext(AbstractFileSystem)
 */
public void testGetFileContext1() throws IOException {
  final Path rootPath = getTestRootPath(fc, "test");
  AbstractFileSystem asf = fc.getDefaultFileSystem();
  // create FileContext using the protected #getFileContext(1) method:
  FileContext fc2 = FileContext.getFileContext(asf);
  // Now just check that this context can do something reasonable:
  final Path path = new Path(rootPath, "zoo");
  // try-with-resources so the stream is closed even if an assertion or
  // write path throws (the old code leaked it on failure)
  try (FSDataOutputStream out = fc2.create(path, EnumSet.of(CREATE),
      Options.CreateOpts.createParent())) {
    // creating the file is the point; nothing to write
  }
  Path pathResolved = fc2.resolvePath(path);
  // JUnit convention: expected value first, actual second
  assertEquals(path.toUri().getPath(), pathResolved.toUri().getPath());
}
 
Example #9
Source File: SymlinkBaseTest.java    From big-c with Apache License 2.0 6 votes vote down vote up
@Test(timeout=10000)
/** Try to create a file with parent that is a dangling link */
public void testCreateFileViaDanglingLinkParent() throws IOException {
  Path danglingDir = new Path(testBaseDir1()+"/dangling");
  Path fileUnderDangling = new Path(testBaseDir1()+"/dangling/file");
  // Make "dangling" a symlink whose target does not exist.
  wrapper.createSymlink(new Path("/doesNotExist"), danglingDir, false);
  try {
    FSDataOutputStream out =
        wrapper.create(fileUnderDangling, EnumSet.of(CreateFlag.CREATE),
            CreateOpts.repFac((short) 1),
            CreateOpts.blockSize(blockSize));
    out.close();
    fail("Created a link with dangling link parent");
  } catch (FileNotFoundException e) {
    // Expected. The parent is dangling.
  }
}
 
Example #10
Source File: LoadGenerator.java    From big-c with Apache License 2.0 6 votes vote down vote up
/** Create a file with a length of <code>fileSize</code>.
 * The file is filled with 'a'.
 */
private void genFile(Path file, long fileSize) throws IOException {
  long startTime = Time.now();
  FSDataOutputStream out = null;
  try {
    out = fc.create(file,
        EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
        CreateOpts.createParent(), CreateOpts.bufferSize(4096),
        CreateOpts.repFac((short) 3));
    executionTime[CREATE] += (Time.now() - startTime);
    numOfOps[CREATE]++;

    long remaining = fileSize;
    while (remaining > 0) {
      // BUGFIX: bound each chunk by the bytes REMAINING, not by fileSize.
      // The old Math.min(fileSize, ...) wrote a full chunk on the final
      // iteration, producing a file larger than fileSize whenever fileSize
      // exceeded, and was not a multiple of, WRITE_CONTENTS.length.
      long s = Math.min(remaining, WRITE_CONTENTS.length);
      out.write(WRITE_CONTENTS, 0, (int) s);
      remaining -= s;
    }

    startTime = Time.now();
    // BUGFIX: close inside the timed section. Previously nothing happened
    // between the two Time.now() calls, so WRITE_CLOSE always recorded ~0
    // and the actual close happened later in the finally block.
    out.close();
    out = null;
    executionTime[WRITE_CLOSE] += (Time.now() - startTime);
    numOfOps[WRITE_CLOSE]++;
  } finally {
    // No-op when already closed above; cleans up only on the error path.
    IOUtils.cleanup(LOG, out);
  }
}
 
Example #11
Source File: SequenceFile.java    From gemfirexd-oss with Apache License 2.0 6 votes vote down vote up
/**
 * Construct the preferred type of SequenceFile Writer.
 * @param fs The configured filesystem.
 * @param conf The configuration.
 * @param name The name of the file.
 * @param keyClass The 'key' type.
 * @param valClass The 'value' type.
 * @param bufferSize buffer size for the underlying outputstream.
 * @param replication replication factor for the file.
 * @param blockSize block size for the file.
 * @param createParent create parent directory if non-existent
 * @param compressionType The compression type.
 * @param codec The compression codec.
 * @param metadata The metadata of the file.
 * @return Returns the handle to the constructed SequenceFile Writer.
 * @throws IOException if the writer cannot be constructed
 */
@Deprecated
public static Writer createWriter(FileSystem fs, Configuration conf,
    Path name, Class keyClass, Class valClass, int bufferSize,
    short replication, long blockSize, boolean createParent,
    CompressionType compressionType, CompressionCodec codec,
    Metadata metadata) throws IOException {
  // Translate the FileSystem-style arguments into CreateOpts and delegate
  // to the FileContext-based factory method.
  FileContext fileContext = FileContext.getFileContext(fs.getUri(), conf);
  CreateOpts parentOpt = createParent
      ? CreateOpts.createParent()
      : CreateOpts.donotCreateParent();
  return createWriter(fileContext, conf, name, keyClass, valClass,
      compressionType, codec, metadata,
      EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
      CreateOpts.bufferSize(bufferSize),
      parentOpt,
      CreateOpts.repFac(replication),
      CreateOpts.blockSize(blockSize));
}
 
Example #12
Source File: FileContextTestHelper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a file at {@code path} filled with {@code numBlocks} blocks of
 * generated test data and returns the number of bytes written.
 * A BlockSize entry in {@code options} sizes the data (otherwise
 * DEFAULT_BLOCK_SIZE); all options are forwarded to create().
 */
public static long createFile(FileContext fc, Path path, int numBlocks,
    CreateOpts... options) throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out =
      fc.create(path, EnumSet.of(CreateFlag.CREATE), options)) {
    out.write(data, 0, data.length);
  }
  return data.length;
}
 
Example #13
Source File: FileContextTestWrapper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a file via the wrapped {@link FileContext}, forwarding the
 * create flags and options unchanged.
 *
 * @param f the path of the file to create
 * @param createFlag flags controlling create/overwrite/append semantics
 * @param opts optional create options (buffer size, replication, etc.)
 * @return the output stream returned by the wrapped context
 * @throws IOException if the underlying create fails
 */
@Override
public FSDataOutputStream create(Path f, EnumSet<CreateFlag> createFlag,
    CreateOpts... opts) throws AccessControlException,
    FileAlreadyExistsException, FileNotFoundException,
    ParentNotDirectoryException, UnsupportedFileSystemException, IOException {
  return fc.create(f, createFlag, opts);
}
 
Example #14
Source File: FileSystemTestWrapper.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Appends {@code numBlocks} blocks of generated test data to {@code path}.
 * Only a BlockSize entry in {@code options} is consulted (it sizes the
 * data); the options are not passed to append().
 */
public void appendToFile(Path path, int numBlocks, CreateOpts... options)
    throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out = fs.append(path)) {
    out.write(data, 0, data.length);
  }
}
 
Example #15
Source File: FileSystemTestWrapper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a file at {@code path} filled with {@code numBlocks} blocks of
 * generated test data and returns the number of bytes written.
 * A BlockSize entry in {@code options} sizes the data (otherwise
 * DEFAULT_BLOCK_SIZE); all options are forwarded to create().
 */
public long createFile(Path path, int numBlocks, CreateOpts... options)
    throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out =
      create(path, EnumSet.of(CreateFlag.CREATE), options)) {
    out.write(data, 0, data.length);
  }
  return data.length;
}
 
Example #16
Source File: FileSystemTestWrapper.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a file at {@code path} filled with {@code numBlocks} blocks of
 * generated test data and returns the number of bytes written.
 * A BlockSize entry in {@code options} sizes the data (otherwise
 * DEFAULT_BLOCK_SIZE); all options are forwarded to create().
 */
public long createFile(Path path, int numBlocks, CreateOpts... options)
    throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out =
      create(path, EnumSet.of(CreateFlag.CREATE), options)) {
    out.write(data, 0, data.length);
  }
  return data.length;
}
 
Example #17
Source File: FileContextTestHelper.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Writes the byte array {@code b} to a newly created file at {@code path},
 * creating parent directories as needed.
 */
public static void writeFile(FileContext fc, Path path, byte b[])
    throws IOException {
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out =
      fc.create(path, EnumSet.of(CreateFlag.CREATE), CreateOpts.createParent())) {
    out.write(b);
  }
}
 
Example #18
Source File: FileContextTestHelper.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Appends {@code numBlocks} blocks of generated test data to {@code path}.
 * Only a BlockSize entry in {@code options} is consulted (it sizes the
 * data); the options are not passed to create().
 */
public static void appendToFile(FileContext fc, Path path, int numBlocks,
    CreateOpts... options) throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.APPEND))) {
    out.write(data, 0, data.length);
  }
}
 
Example #19
Source File: FileSystemTestWrapper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Appends {@code numBlocks} blocks of generated test data to {@code path}.
 * Only a BlockSize entry in {@code options} is consulted (it sizes the
 * data); the options are not passed to append().
 */
public void appendToFile(Path path, int numBlocks, CreateOpts... options)
    throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out = fs.append(path)) {
    out.write(data, 0, data.length);
  }
}
 
Example #20
Source File: FileContextTestWrapper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Appends {@code numBlocks} blocks of generated test data to {@code path}.
 * Only a BlockSize entry in {@code options} is consulted (it sizes the
 * data); the options are not passed to create().
 */
public void appendToFile(Path path, int numBlocks, CreateOpts... options)
    throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.APPEND))) {
    out.write(data, 0, data.length);
  }
}
 
Example #21
Source File: DataGenerator.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/** Create a file with the name <code>file</code> and 
 * a length of <code>fileSize</code>. The file is filled with character 'a'.
 */
private void genFile(Path file, long fileSize) throws IOException {
  // try-with-resources: the old code never closed the stream when a write
  // threw, leaking the handle
  try (FSDataOutputStream out = fc.create(file,
      EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
      CreateOpts.createParent(), CreateOpts.bufferSize(4096),
      CreateOpts.repFac((short) 3))) {
    for (long i = 0; i < fileSize; i++) {
      out.writeByte('a');
    }
  }
}
 
Example #22
Source File: FileContextTestHelper.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a file at {@code path} filled with {@code numBlocks} blocks of
 * generated test data and returns the number of bytes written.
 * A BlockSize entry in {@code options} sizes the data (otherwise
 * DEFAULT_BLOCK_SIZE); all options are forwarded to create().
 */
public static long createFile(FileContext fc, Path path, int numBlocks,
    CreateOpts... options) throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out =
      fc.create(path, EnumSet.of(CreateFlag.CREATE), options)) {
    out.write(data, 0, data.length);
  }
  return data.length;
}
 
Example #23
Source File: FileContextTestWrapper.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a file at {@code path} filled with {@code numBlocks} blocks of
 * generated test data and returns the number of bytes written.
 * A BlockSize entry in {@code options} sizes the data (otherwise
 * DEFAULT_BLOCK_SIZE); all options are forwarded to create().
 */
public long createFile(Path path, int numBlocks, CreateOpts... options)
    throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out =
      fc.create(path, EnumSet.of(CreateFlag.CREATE), options)) {
    out.write(data, 0, data.length);
  }
  return data.length;
}
 
Example #24
Source File: FileContextTestWrapper.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a file at {@code path} filled with {@code numBlocks} blocks of
 * generated test data and returns the number of bytes written.
 * A BlockSize entry in {@code options} sizes the data (otherwise
 * DEFAULT_BLOCK_SIZE); all options are forwarded to create().
 */
public long createFile(Path path, int numBlocks, CreateOpts... options)
    throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out =
      fc.create(path, EnumSet.of(CreateFlag.CREATE), options)) {
    out.write(data, 0, data.length);
  }
  return data.length;
}
 
Example #25
Source File: FileContextMainOperationsBaseTest.java    From big-c with Apache License 2.0 5 votes vote down vote up
@Test
public void testSetVerifyChecksum() throws IOException {
  final Path rootPath = getTestRootPath(fc, "test");
  final Path path = new Path(rootPath, "zoo");

  FSDataOutputStream out = fc.create(path, EnumSet.of(CREATE),
      Options.CreateOpts.createParent());
  try {
    // instruct FS to verify checksum through the FileContext:
    fc.setVerifyChecksum(true, path);
    out.write(data, 0, data.length);
  } finally {
    out.close();
  }

  // NB: underlying FS may be different (this is an abstract test),
  // so we cannot assert .zoo.crc existence.
  // Instead, we check that the file is read correctly:
  FileStatus fileStatus = fc.getFileStatus(path);
  final long len = fileStatus.getLen();
  assertEquals(data.length, len);
  byte[] bb = new byte[(int)len];
  FSDataInputStream fsdis = fc.open(path);
  try {
    // BUGFIX: read(byte[]) may legally return fewer bytes than requested,
    // which would make assertArrayEquals fail spuriously on a short read;
    // readFully guarantees the whole buffer is populated (or throws).
    fsdis.readFully(bb);
  } finally {
    fsdis.close();
  }
  assertArrayEquals(data, bb);
}
 
Example #26
Source File: FileContextMainOperationsBaseTest.java    From big-c with Apache License 2.0 5 votes vote down vote up
@Test
public void testOutputStreamClosedTwice() throws IOException {
  // HADOOP-4760: per Closeable#close(), closing an already-closed
  // stream must have no effect.
  Path src = getTestRootPath(fc, "test/hadoop/file");
  FSDataOutputStream out = fc.create(src, EnumSet.of(CREATE),
      Options.CreateOpts.createParent());
  out.writeChar('H'); // write some data
  out.close();
  out.close(); // second close must be a silent no-op
}
 
Example #27
Source File: FileContextTestWrapper.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Appends {@code numBlocks} blocks of generated test data to {@code path}.
 * Only a BlockSize entry in {@code options} is consulted (it sizes the
 * data); the options are not passed to create().
 */
public void appendToFile(Path path, int numBlocks, CreateOpts... options)
    throws IOException {
  BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
  long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
      : DEFAULT_BLOCK_SIZE;
  byte[] data = getFileData(numBlocks, blockSize);
  // try-with-resources: the old code leaked the stream if write() threw
  try (FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.APPEND))) {
    out.write(data, 0, data.length);
  }
}
 
Example #28
Source File: FileContextMainOperationsBaseTest.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Writes {@code len} bytes of test data, reads them back and verifies
 * byte-for-byte equality, then deletes the file and confirms removal.
 */
private void writeReadAndDelete(int len) throws IOException {
  Path path = getTestRootPath(fc, "test/hadoop/file");

  fc.mkdir(path.getParent(), FsPermission.getDefault(), true);

  // try-with-resources: the streams were previously leaked whenever a
  // write/read or an assertion in between threw
  try (FSDataOutputStream out = fc.create(path, EnumSet.of(CREATE),
      CreateOpts.repFac((short) 1), CreateOpts
          .blockSize(getDefaultBlockSize()))) {
    out.write(data, 0, len);
  }

  Assert.assertTrue("Exists", exists(fc, path));
  Assert.assertEquals("Length", len, fc.getFileStatus(path).getLen());

  byte[] buf = new byte[len];
  try (FSDataInputStream in = fc.open(path)) {
    in.readFully(0, buf);
  }

  Assert.assertEquals(len, buf.length);
  for (int i = 0; i < buf.length; i++) {
    Assert.assertEquals("Position " + i, data[i], buf[i]);
  }

  Assert.assertTrue("Deleted", fc.delete(path, false));

  Assert.assertFalse("No longer exists", exists(fc, path));

}
 
Example #29
Source File: FileContextTestWrapper.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a file via the wrapped {@link FileContext}, forwarding the
 * create flags and options unchanged.
 *
 * @param f the path of the file to create
 * @param createFlag flags controlling create/overwrite/append semantics
 * @param opts optional create options (buffer size, replication, etc.)
 * @return the output stream returned by the wrapped context
 * @throws IOException if the underlying create fails
 */
@Override
public FSDataOutputStream create(Path f, EnumSet<CreateFlag> createFlag,
    CreateOpts... opts) throws AccessControlException,
    FileAlreadyExistsException, FileNotFoundException,
    ParentNotDirectoryException, UnsupportedFileSystemException, IOException {
  return fc.create(f, createFlag, opts);
}
 
Example #30
Source File: DataGenerator.java    From big-c with Apache License 2.0 5 votes vote down vote up
/** Create a file with the name <code>file</code> and 
 * a length of <code>fileSize</code>. The file is filled with character 'a'.
 */
private void genFile(Path file, long fileSize) throws IOException {
  // try-with-resources: the old code never closed the stream when a write
  // threw, leaking the handle
  try (FSDataOutputStream out = fc.create(file,
      EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
      CreateOpts.createParent(), CreateOpts.bufferSize(4096),
      CreateOpts.repFac((short) 3))) {
    for (long i = 0; i < fileSize; i++) {
      out.writeByte('a');
    }
  }
}