Java Code Examples for org.apache.hadoop.fs.FileContext#create()

The following examples show how to use org.apache.hadoop.fs.FileContext#create(). They are drawn from the test and utility code of several open-source projects; the source file, project, and license are noted above each example.
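FileContext#create() takes the target Path, an EnumSet of CreateFlag values (CREATE, OVERWRITE, APPEND), and optional Options.CreateOpts arguments such as createParent() or perms(). The sketch below shows the basic call pattern; it is not taken from any of the projects listed here, and the local file system and file path are placeholder assumptions chosen only for illustration.

import java.util.EnumSet;

import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;

public class FileContextCreateSketch {
  public static void main(String[] args) throws Exception {
    // Use the local file system; FileContext.getFileContext(conf) would target HDFS instead.
    FileContext fc = FileContext.getLocalFSFileContext();
    Path path = new Path("/tmp/filecontext-create-demo.txt");  // placeholder path

    // CREATE + OVERWRITE: create the file, replacing any existing one;
    // createParent() creates missing parent directories.
    try (FSDataOutputStream out = fc.create(path,
        EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
        Options.CreateOpts.createParent())) {
      out.writeBytes("hello, FileContext\n");
    }
  }
}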
Example 1
Source File: TestDefaultContainerExecutor.java    From hadoop with Apache License 2.0
byte[] createTmpFile(Path dst, Random r, int len)
    throws IOException {
  // use unmodified local context
  FileContext lfs = FileContext.getLocalFSFileContext();
  dst = lfs.makeQualified(dst);
  lfs.mkdir(dst.getParent(), null, true);
  byte[] bytes = new byte[len];
  FSDataOutputStream out = null;
  try {
    out = lfs.create(dst, EnumSet.of(CREATE, OVERWRITE));
    r.nextBytes(bytes);
    out.write(bytes);
  } finally {
    if (out != null) out.close();
  }
  return bytes;
}
 
Example 2
Source File: LoadGeneratorMR.java    From hadoop with Apache License 2.0
@Override
public void close() throws IOException {
  // Output the results to a file named "Results" in the output dir
  FileContext fc;
  try {
    fc = FileContext.getFileContext(jobConf);
  } catch (IOException ioe) {
    System.err.println("Can not initialize the file system: " + 
        ioe.getLocalizedMessage());
    return;
  }
  FSDataOutputStream o = fc.create(FileOutputFormat.getTaskOutputPath(jobConf, "Results"),
      EnumSet.of(CreateFlag.CREATE));
     
  PrintStream out = new PrintStream(o);
  printResults(out);
  out.close();
  o.close();
}
 
Example 3
Source File: TestDefaultContainerExecutor.java    From big-c with Apache License 2.0
byte[] createTmpFile(Path dst, Random r, int len)
    throws IOException {
  // use unmodified local context
  FileContext lfs = FileContext.getLocalFSFileContext();
  dst = lfs.makeQualified(dst);
  lfs.mkdir(dst.getParent(), null, true);
  byte[] bytes = new byte[len];
  FSDataOutputStream out = null;
  try {
    out = lfs.create(dst, EnumSet.of(CREATE, OVERWRITE));
    r.nextBytes(bytes);
    out.write(bytes);
  } finally {
    if (out != null) out.close();
  }
  return bytes;
}
 
Example 4
Source File: LoadGeneratorMR.java    From big-c with Apache License 2.0
@Override
public void close() throws IOException {
  // Output the results to a file named "Results" in the output dir
  FileContext fc;
  try {
    fc = FileContext.getFileContext(jobConf);
  } catch (IOException ioe) {
    System.err.println("Can not initialize the file system: " + 
        ioe.getLocalizedMessage());
    return;
  }
  FSDataOutputStream o = fc.create(FileOutputFormat.getTaskOutputPath(jobConf, "Results"),
      EnumSet.of(CreateFlag.CREATE));
     
  PrintStream out = new PrintStream(o);
  printResults(out);
  out.close();
  o.close();
}
 
Example 5
Source File: IOUtilsTest.java    From attic-apex-malhar with Apache License 2.0
private void testCopyPartialHelper(int dataSize, int offset, long size) throws IOException
{
  FileUtils.deleteQuietly(new File("target/IOUtilsTest"));
  File file = new File("target/IOUtilsTest/testCopyPartial/input");
  createDataFile(file, dataSize);

  FileContext fileContext = FileContext.getFileContext();
  DataInputStream inputStream = fileContext.open(new Path(file.getAbsolutePath()));

  Path output = new Path("target/IOUtilsTest/testCopyPartial/output");
  DataOutputStream outputStream = fileContext.create(output, EnumSet
      .of(CreateFlag.CREATE, CreateFlag.OVERWRITE), Options.CreateOpts.CreateParent.createParent());

  if (offset == 0) {
    IOUtils.copyPartial(inputStream, size, outputStream);
  } else {
    IOUtils.copyPartial(inputStream, offset, size, outputStream);
  }

  outputStream.close();

  Assert.assertTrue("output exists", fileContext.util().exists(output));
  Assert.assertEquals("output size", size, fileContext.getFileStatus(output).getLen());
  // FileUtils.deleteQuietly(new File("target/IOUtilsTest"));
}
 
Example 6
Source File: TestFSDownload.java    From hadoop with Apache License 2.0
static void createFile(FileContext files, Path p, int len, Random r)
    throws IOException {
  FSDataOutputStream out = null;
  try {
    byte[] bytes = new byte[len];
    out = files.create(p, EnumSet.of(CREATE, OVERWRITE));
    r.nextBytes(bytes);
    out.write(bytes);
  } finally {
    if (out != null) out.close();
  }
}
 
Example 7
Source File: TestListFilesInFileContext.java    From hadoop with Apache License 2.0
private static void writeFile(FileContext fc, Path name, int fileSize)
    throws IOException {
  // Create and write a file that contains three blocks of data
  FSDataOutputStream stm = fc.create(name, EnumSet.of(CreateFlag.CREATE),
      Options.CreateOpts.createParent());
  byte[] buffer = new byte[fileSize];
  Random rand = new Random(seed);
  rand.nextBytes(buffer);
  stm.write(buffer);
  stm.close();
}
 
Example 8
Source File: TestLogalyzer.java    From hadoop with Apache License 2.0
/**
 * Creates two simple log files in the workspace directory.
 *
 * @return the path of the directory containing the created log files
 * @throws IOException
 */
private Path createLogFile() throws IOException {

  FileContext files = FileContext.getLocalFSFileContext();

  Path ws = new Path(workSpace.getAbsoluteFile().getAbsolutePath());

  files.delete(ws, true);
  Path workSpacePath = new Path(workSpace.getAbsolutePath(), "log");
  files.mkdir(workSpacePath, null, true);

  LOG.info("create logfile.log");
  Path logfile1 = new Path(workSpacePath, "logfile.log");

  FSDataOutputStream os = files.create(logfile1,
      EnumSet.of(CreateFlag.CREATE));
  os.writeBytes("4 3" + EL + "1 3" + EL + "4 44" + EL);
  os.writeBytes("2 3" + EL + "1 3" + EL + "0 45" + EL);
  os.writeBytes("4 3" + EL + "1 3" + EL + "1 44" + EL);

  os.flush();
  os.close();
  LOG.info("create logfile1.log");

  Path logfile2 = new Path(workSpacePath, "logfile1.log");

  os = files.create(logfile2, EnumSet.of(CreateFlag.CREATE));
  os.writeBytes("4 3" + EL + "1 3" + EL + "3 44" + EL);
  os.writeBytes("2 3" + EL + "1 3" + EL + "0 45" + EL);
  os.writeBytes("4 3" + EL + "1 3" + EL + "1 44" + EL);

  os.flush();
  os.close();

  return workSpacePath;
}
 
Example 9
Source File: TestFSDownload.java    From big-c with Apache License 2.0
static void createFile(FileContext files, Path p, int len, Random r)
    throws IOException {
  FSDataOutputStream out = null;
  try {
    byte[] bytes = new byte[len];
    out = files.create(p, EnumSet.of(CREATE, OVERWRITE));
    r.nextBytes(bytes);
    out.write(bytes);
  } finally {
    if (out != null) out.close();
  }
}
 
Example 10
Source File: TestListFilesInFileContext.java    From big-c with Apache License 2.0
private static void writeFile(FileContext fc, Path name, int fileSize)
    throws IOException {
  // Create and write a file that contains three blocks of data
  FSDataOutputStream stm = fc.create(name, EnumSet.of(CreateFlag.CREATE),
      Options.CreateOpts.createParent());
  byte[] buffer = new byte[fileSize];
  Random rand = new Random(seed);
  rand.nextBytes(buffer);
  stm.write(buffer);
  stm.close();
}
 
Example 11
Source File: TestLogalyzer.java    From big-c with Apache License 2.0
/**
 * Creates two simple log files in the workspace directory.
 *
 * @return the path of the directory containing the created log files
 * @throws IOException
 */
private Path createLogFile() throws IOException {

  FileContext files = FileContext.getLocalFSFileContext();

  Path ws = new Path(workSpace.getAbsoluteFile().getAbsolutePath());

  files.delete(ws, true);
  Path workSpacePath = new Path(workSpace.getAbsolutePath(), "log");
  files.mkdir(workSpacePath, null, true);

  LOG.info("create logfile.log");
  Path logfile1 = new Path(workSpacePath, "logfile.log");

  FSDataOutputStream os = files.create(logfile1,
      EnumSet.of(CreateFlag.CREATE));
  os.writeBytes("4 3" + EL + "1 3" + EL + "4 44" + EL);
  os.writeBytes("2 3" + EL + "1 3" + EL + "0 45" + EL);
  os.writeBytes("4 3" + EL + "1 3" + EL + "1 44" + EL);

  os.flush();
  os.close();
  LOG.info("create logfile1.log");

  Path logfile2 = new Path(workSpacePath, "logfile1.log");

  os = files.create(logfile2, EnumSet.of(CreateFlag.CREATE));
  os.writeBytes("4 3" + EL + "1 3" + EL + "3 44" + EL);
  os.writeBytes("2 3" + EL + "1 3" + EL + "0 45" + EL);
  os.writeBytes("4 3" + EL + "1 3" + EL + "1 44" + EL);

  os.flush();
  os.close();

  return workSpacePath;
}
 
Example 12
Source File: FSRecoveryHandler.java    From Bats with Apache License 2.0
@Override
public Object restore() throws IOException
{
  FileContext fc = FileContext.getFileContext(fs.getUri());

  // recover from wherever it was left
  if (fc.util().exists(snapshotBackupPath)) {
    LOG.warn("Incomplete checkpoint, reverting to {}", snapshotBackupPath);
    fc.rename(snapshotBackupPath, snapshotPath, Rename.OVERWRITE);

    // combine logs (w/o append, create new file)
    Path tmpLogPath = new Path(basedir, "log.combined");
    try (FSDataOutputStream fsOut = fc.create(tmpLogPath, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE))) {
      try (FSDataInputStream fsIn = fc.open(logBackupPath)) {
        IOUtils.copy(fsIn, fsOut);
      }

      try (FSDataInputStream fsIn = fc.open(logPath)) {
        IOUtils.copy(fsIn, fsOut);
      }
    }

    fc.rename(tmpLogPath, logPath, Rename.OVERWRITE);
    fc.delete(logBackupPath, false);
  } else {
    // we have log backup, but no checkpoint backup
    // failure between log rotation and writing checkpoint
    if (fc.util().exists(logBackupPath)) {
      LOG.warn("Found {}, did checkpointing fail?", logBackupPath);
      fc.rename(logBackupPath, logPath, Rename.OVERWRITE);
    }
  }

  if (!fc.util().exists(snapshotPath)) {
    LOG.debug("No existing checkpoint.");
    return null;
  }

  LOG.debug("Reading checkpoint {}", snapshotPath);
  InputStream is = fc.open(snapshotPath);
  // indeterministic class loading behavior
  // http://stackoverflow.com/questions/9110677/readresolve-not-working-an-instance-of-guavas-serializedform-appears
  final ClassLoader loader = Thread.currentThread().getContextClassLoader();
  try (ObjectInputStream ois = new ObjectInputStream(is)
  {
    @Override
    protected Class<?> resolveClass(ObjectStreamClass objectStreamClass)
        throws IOException, ClassNotFoundException
    {
      return Class.forName(objectStreamClass.getName(), true, loader);
    }
  }) {
    return ois.readObject();
  } catch (ClassNotFoundException cnfe) {
    throw new IOException("Failed to read checkpointed state", cnfe);
  }
}
 
Example 13
Source File: FSJsonLineFile.java    From Bats with Apache License 2.0
public FSJsonLineFile(FileContext fileContext, Path path, FsPermission permission) throws IOException
{
  this.os = fileContext.create(path, EnumSet.of(CreateFlag.CREATE, CreateFlag.APPEND), Options.CreateOpts.perms(permission));
  this.objectMapper = (new JSONSerializationProvider()).getContext(null);
}
 
Example 14
Source File: FSRecoveryHandler.java    From attic-apex-core with Apache License 2.0
@Override
public Object restore() throws IOException
{
  FileContext fc = FileContext.getFileContext(fs.getUri());

  // recover from wherever it was left
  if (fc.util().exists(snapshotBackupPath)) {
    LOG.warn("Incomplete checkpoint, reverting to {}", snapshotBackupPath);
    fc.rename(snapshotBackupPath, snapshotPath, Rename.OVERWRITE);

    // combine logs (w/o append, create new file)
    Path tmpLogPath = new Path(basedir, "log.combined");
    try (FSDataOutputStream fsOut = fc.create(tmpLogPath, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE))) {
      try (FSDataInputStream fsIn = fc.open(logBackupPath)) {
        IOUtils.copy(fsIn, fsOut);
      }

      try (FSDataInputStream fsIn = fc.open(logPath)) {
        IOUtils.copy(fsIn, fsOut);
      }
    }

    fc.rename(tmpLogPath, logPath, Rename.OVERWRITE);
    fc.delete(logBackupPath, false);
  } else {
    // we have log backup, but no checkpoint backup
    // failure between log rotation and writing checkpoint
    if (fc.util().exists(logBackupPath)) {
      LOG.warn("Found {}, did checkpointing fail?", logBackupPath);
      fc.rename(logBackupPath, logPath, Rename.OVERWRITE);
    }
  }

  if (!fc.util().exists(snapshotPath)) {
    LOG.debug("No existing checkpoint.");
    return null;
  }

  LOG.debug("Reading checkpoint {}", snapshotPath);
  InputStream is = fc.open(snapshotPath);
  // indeterministic class loading behavior
  // http://stackoverflow.com/questions/9110677/readresolve-not-working-an-instance-of-guavas-serializedform-appears
  final ClassLoader loader = Thread.currentThread().getContextClassLoader();
  try (ObjectInputStream ois = new ObjectInputStream(is)
  {
    @Override
    protected Class<?> resolveClass(ObjectStreamClass objectStreamClass)
        throws IOException, ClassNotFoundException
    {
      return Class.forName(objectStreamClass.getName(), true, loader);
    }
  }) {
    return ois.readObject();
  } catch (ClassNotFoundException cnfe) {
    throw new IOException("Failed to read checkpointed state", cnfe);
  }
}
 
Example 15
Source File: FSJsonLineFile.java    From attic-apex-core with Apache License 2.0
public FSJsonLineFile(FileContext fileContext, Path path, FsPermission permission) throws IOException
{
  this.os = fileContext.create(path, EnumSet.of(CreateFlag.CREATE, CreateFlag.APPEND), Options.CreateOpts.perms(permission));
  this.objectMapper = (new JSONSerializationProvider()).getContext(null);
}