org.apache.commons.vfs2.AllFileSelector Java Examples

The following examples show how to use org.apache.commons.vfs2.AllFileSelector, a FileSelector that matches every file and folder in a hierarchy, including the base folder itself. The examples are drawn from open source projects; the source file, project, and license are noted above each one.
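Before the project examples, here is a minimal, self-contained sketch of the three operations they all build on: copying, finding, and deleting with an AllFileSelector. This sketch is illustrative only, not taken from any project below; the file:///tmp paths are placeholders.

import org.apache.commons.vfs2.AllFileSelector;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class AllFileSelectorSketch {
  public static void main( String[] args ) throws FileSystemException {
    FileSystemManager manager = VFS.getManager();
    FileObject src = manager.resolveFile( "file:///tmp/src" );   // placeholder path
    FileObject dest = manager.resolveFile( "file:///tmp/dest" ); // placeholder path

    // Recursively copy everything under src into dest
    dest.copyFrom( src, new AllFileSelector() );

    // findFiles returns every descendant plus the base folder itself
    FileObject[] all = dest.findFiles( new AllFileSelector() );
    System.out.println( "Entries found: " + all.length );

    // delete returns the number of entries removed, including dest itself
    int deleted = dest.delete( new AllFileSelector() );
    System.out.println( "Entries deleted: " + deleted );
  }
}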
Example #1
Source File: DistributedCacheUtilImplOSDependentTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
@SuppressWarnings( "squid:S2699" ) // assertions made in utility method
public void stageForCache() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  // Copy the contents of test folder
  FileObject source = DistributedCacheTestUtil.createTestFolderWithContent();

  try {
    Path root = new Path( "bin/test/stageArchiveForCacheTest" );
    Path dest = new Path( root, "org/pentaho/mapreduce/" );

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem( conf );

    DistributedCacheTestUtil.stageForCacheTester( ch, source, fs, root, dest, 6, 9 );
  } finally {
    source.delete( new AllFileSelector() );
  }
}
 
Example #2
Source File: DistributedCacheUtilImplOSDependentTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void stageForCache_destination_exists() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  Configuration conf = new Configuration();
  FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem( conf );

  FileObject source = DistributedCacheTestUtil.createTestFolderWithContent();
  try {
    Path root = new Path( "bin/test/stageForCache_destination_exists" );
    Path dest = new Path( root, "dest" );

    fs.mkdirs( dest );
    assertTrue( fs.exists( dest ) );
    assertTrue( fs.getFileStatus( dest ).isDir() );

    DistributedCacheTestUtil.stageForCacheTester( ch, source, fs, root, dest, 6, 9 );
  } finally {
    source.delete( new AllFileSelector() );
  }
}
 
Example #3
Source File: DistributedCacheUtilImplOSDependentTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void stagePluginsForCache() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  Configuration conf = new Configuration();
  FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem( conf );

  Path pluginsDir = new Path( "bin/test/plugins-installation-dir" );

  FileObject pluginDir = DistributedCacheTestUtil.createTestFolderWithContent();

  try {
    ch.stagePluginsForCache( fs, pluginsDir, "bin/test/sample-folder" );
    Path pluginInstallPath = new Path( pluginsDir, "bin/test/sample-folder" );
    assertTrue( fs.exists( pluginInstallPath ) );
    ContentSummary summary = fs.getContentSummary( pluginInstallPath );
    assertEquals( 6, summary.getFileCount() );
    assertEquals( 9, summary.getDirectoryCount() );
  } finally {
    pluginDir.delete( new AllFileSelector() );
    fs.delete( pluginsDir, true );
  }
}
 
Example #4
Source File: ResourceService.java    From spoofax with Apache License 2.0
@Override public File localFile(FileObject resource, FileObject dir) {
    if(resource instanceof LocalFile) {
        return FileUtils.toFile(resource);
    }

    final File localDir = localPath(dir);
    if(localDir == null) {
        throw new MetaborgRuntimeException("Replication directory " + dir
            + " is not on the local filesystem, cannot get local file for " + resource);
    }
    try {
        dir.createFolder();

        final FileObject copyLoc;
        if(resource.getType() == FileType.FOLDER) {
            copyLoc = dir;
        } else {
            copyLoc = ResourceUtils.resolveFile(dir, resource.getName().getBaseName());
        }
        copyLoc.copyFrom(resource, new AllFileSelector());

        return localDir;
    } catch(FileSystemException e) {
        throw new MetaborgRuntimeException("Could not get local file for " + resource, e);
    }
}
 
Example #5
Source File: DistributedCacheUtilImplOSDependentTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void installKettleEnvironment() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  Configuration conf = new Configuration();
  FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem( conf );

  // This "empty pmr" contains a lib/ folder but with no content
  FileObject pmrArchive = KettleVFS.getFileObject( getClass().getResource( "/empty-pmr.zip" ).toURI().getPath() );

  FileObject bigDataPluginDir = DistributedCacheTestUtil
    .createTestFolderWithContent( DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME );

  Path root = new Path( "bin/test/installKettleEnvironment" );
  System.setProperty("karaf.home", "bin/test/installKettleEnvironment/system/karaf" );
  try {
    ch.installKettleEnvironment( pmrArchive, fs, root, bigDataPluginDir, null, "" );
    assertTrue( ch.isKettleEnvironmentInstalledAt( fs, root ) );
  } finally {
    bigDataPluginDir.delete( new AllFileSelector() );
    fs.delete( root, true );
  }
}
 
Example #6
Source File: DistributedCacheUtilImplOSDependentTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void installKettleEnvironment_additional_plugins() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  Configuration conf = new Configuration();
  FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem( conf );

  // This "empty pmr" contains a lib/ folder but with no content
  FileObject pmrArchive = KettleVFS.getFileObject( getClass().getResource( "/empty-pmr.zip" ).toURI().getPath() );
  FileObject bigDataPluginDir = DistributedCacheTestUtil
    .createTestFolderWithContent( DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME );

  String pluginName = "additional-plugin";
  FileObject additionalPluginDir = DistributedCacheTestUtil.createTestFolderWithContent( pluginName );
  Path root = new Path( "bin/test/installKettleEnvironment" );
  try {
    ch.installKettleEnvironment( pmrArchive, fs, root, bigDataPluginDir, "bin/test/" + pluginName, "" );
    assertTrue( ch.isKettleEnvironmentInstalledAt( fs, root ) );
    assertTrue( fs.exists( new Path( root, "plugins/bin/test/" + pluginName ) ) );
  } finally {
    bigDataPluginDir.delete( new AllFileSelector() );
    additionalPluginDir.delete( new AllFileSelector() );
    fs.delete( root, true );
  }
}
 
Example #7
Source File: DistributedCacheUtilImplTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void extractToTemp() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  FileObject archive =
    KettleVFS.getFileObject( getClass().getResource( "/pentaho-mapreduce-sample.jar" ).toURI().getPath() );
  FileObject extracted = ch.extractToTemp( archive );

  assertNotNull( extracted );
  assertTrue( extracted.exists() );
  try {
    // There should be 3 files and 5 directories inside the root folder; the root folder itself makes the 9th entry
    assertEquals( 9, extracted.findFiles( new AllFileSelector() ).length );
  } finally {
    // clean up after ourselves
    ch.deleteDirectory( extracted );
  }
}
 
Example #8
Source File: DistributedCacheUtilImplTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void findFiles_vfs() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  FileObject testFolder = DistributedCacheTestUtil.createTestFolderWithContent();

  try {
    // Simply test we can find the jar files in our test folder
    List<String> jars = ch.findFiles( testFolder, "jar" );
    assertEquals( 4, jars.size() );

    // Look for all files and folders
    List<String> all = ch.findFiles( testFolder, null );
    assertEquals( 15, all.size() );
  } finally {
    testFolder.delete( new AllFileSelector() );
  }
}
 
Example #9
Source File: DistributedCacheUtilImplTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void stageForCache_destination_no_overwrite() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  Configuration conf = new Configuration();
  FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem( conf );

  FileObject source = DistributedCacheTestUtil.createTestFolderWithContent();
  try {
    Path root = new Path( "bin/test/stageForCache_destination_exists" );
    Path dest = new Path( root, "dest" );

    fs.mkdirs( dest );
    assertTrue( fs.exists( dest ) );
    assertTrue( fs.getFileStatus( dest ).isDir() );
    try {
      ch.stageForCache( source, fs, dest, false );
    } catch ( KettleFileException ex ) {
      assertTrue( ex.getMessage(), ex.getMessage().contains( "Destination exists" ) );
    } finally {
      fs.delete( root, true );
    }
  } finally {
    source.delete( new AllFileSelector() );
  }
}
 
Example #10
Source File: JobEntryTalendJobExec.java    From pentaho-kettle with Apache License 2.0
private URL[] prepareJarFiles( FileObject zipFile ) throws Exception {

    // zip:file:///tmp/foo.zip
    FileInputList fileList = FileInputList.createFileList( this, new String[] { "zip:" + zipFile.toString(), },
      new String[] { ".*\\.jar$", }, // Include mask: only jar files
      new String[] { ".*classpath\\.jar$", }, // Exclude mask: only jar files
      new String[] { "Y", }, // File required
      new boolean[] { true, } ); // Search sub-directories

    List<URL> files = new ArrayList<URL>();

    // Copy the jar files in the temp folder...
    //
    for ( FileObject file : fileList.getFiles() ) {
      FileObject jarfilecopy =
        KettleVFS.createTempFile(
          file.getName().getBaseName(), ".jar", environmentSubstitute( "${java.io.tmpdir}" ) );
      jarfilecopy.copyFrom( file, new AllFileSelector() );
      files.add( jarfilecopy.getURL() );
    }

    return files.toArray( new URL[files.size()] );
  }
 
Example #11
Source File: ResourceAgent.java    From spoofax with Apache License 2.0
@Override public boolean rmdir(String dn) {
    try {
        final FileObject resource = resourceService.resolve(workingDir, dn);
        return resource.delete(new AllFileSelector()) > 0;
    } catch(FileSystemException e) {
        throw new RuntimeException("Could not delete directory " + dn, e);
    }
}
 
Example #12
Source File: ResourceService.java    From spoofax with Apache License 2.0
@Override public File localFile(FileObject resource) {
    if(resource instanceof LocalFile) {
        return FileUtils.toFile(resource);
    }

    try {
        return resource.getFileSystem().replicateFile(resource, new AllFileSelector());
    } catch(FileSystemException e) {
        throw new MetaborgRuntimeException("Could not get local file for " + resource, e);
    }
}
 
Example #13
Source File: LanguageSpecBuilder.java    From spoofax with Apache License 2.0
private void cleanAndLog(FileObject dir) {
    logger.info("Deleting {}", dir);
    try {
        dir.delete(new AllFileSelector());
    } catch(FileSystemException e) {
        logger.error("Could not delete {}", e, dir);
    }
}
 
Example #14
Source File: CustomRamProviderTest.java    From commons-vfs with Apache License 2.0
/**
 * Test some special file name symbols.
 * <p>
 * Use the RamProvider since it has no character limitations like
 * the (Windows) LocalFileProvider.
 */
@Test
public void testSpecialName() throws FileSystemException
{
    // we test with this file name
    // does not work with '!'
    final String testDir = "/spacialtest/";
    final String testFileName = "test:+-_ \"()<>%#.txt";
    final String expectedName = testDir + testFileName;

    final FileObject dir = prepareSpecialFile(testDir, testFileName);


    // DO: verify you can list it:
    final FileObject[] findFilesResult = dir.findFiles(new AllFileSelector()); // includes dir
    final FileObject[] getChildrenResult = dir.getChildren();
    final FileObject getChildResult = dir.getChild(UriParser.encode(testFileName, ENC));


    // validate findFiles returns expected result
    assertEquals("Unexpected result findFiles: " + Arrays.toString(findFilesResult), 2, findFilesResult.length);
    String resultName = findFilesResult[0].getName().getPathDecoded();
    assertEquals("findFiles Child name does not match", expectedName, resultName);
    assertEquals("Did findFiles but child was no file", FileType.FILE, findFilesResult[0].getType());

    // validate getChildren returns expected result
    assertEquals("Unexpected result getChildren: " + Arrays.toString(getChildrenResult), 1, getChildrenResult.length);
    resultName = getChildrenResult[0].getName().getPathDecoded();
    assertEquals("getChildren Child name does not match", expectedName, resultName);
    assertEquals("Did getChildren but child was no file", FileType.FILE, getChildrenResult[0].getType());

    // validate getChild returns expected child
    assertNotNull("Did not find direct child", getChildResult);
    resultName = getChildResult.getName().getPathDecoded();
    assertEquals("getChild name does not match", expectedName, resultName);
    assertEquals("getChild was no file", FileType.FILE, getChildResult.getType());
}
 
Example #15
Source File: DistributedCacheUtilImplOSDependentTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void findFiles_hdfs_native() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  // Copy the contents of test folder
  FileObject source = DistributedCacheTestUtil.createTestFolderWithContent();
  Path root = new Path( "bin/test/stageArchiveForCacheTest" );
  Configuration conf = new Configuration();
  FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem( conf );
  Path dest = new Path( root, "org/pentaho/mapreduce/" );
  try {
    try {
      ch.stageForCache( source, fs, dest, true );

      List<Path> files = ch.findFiles( fs, dest, null );
      assertEquals( 6, files.size() );

      files = ch.findFiles( fs, dest, Pattern.compile( ".*jar$" ) );
      assertEquals( 2, files.size() );

      files = ch.findFiles( fs, dest, Pattern.compile( ".*folder$" ) );
      assertEquals( 1, files.size() );
    } finally {
      fs.delete( root, true );
    }
  } finally {
    source.delete( new AllFileSelector() );
  }
}
 
Example #16
Source File: CustomRamProviderTest.java    From commons-vfs with Apache License 2.0
/**
 * Test if listing files with known scheme prefix works.
 * <p>
 * This test is not RamProvider specific but it uses it as a simple test-bed.
 * Verifies VFS-741.
 */
@Test
public void testSchemePrefix() throws FileSystemException
{
    // use a :-prefix with a known scheme (unknown scheme works since VFS-398)
    final String KNOWN_SCHEME = manager.getSchemes()[0]; // typically "ram"

    // we test with this file name
    final String testDir = "/prefixtest/";
    final String testFileName = KNOWN_SCHEME + ":test:txt";
    final String expectedName = testDir + testFileName;

    final FileObject dir = prepareSpecialFile(testDir, testFileName);


    // verify we can list dir

    // if not it throws:
    // Caused by: org.apache.commons.vfs2.FileSystemException: Invalid descendent file name "ram:data:test.txt".
    //   at org.apache.commons.vfs2.impl.DefaultFileSystemManager.resolveName
    //   at org.apache.commons.vfs2.provider.AbstractFileObject.getChildren
    //   at org.apache.commons.vfs2.provider.AbstractFileObject.traverse
    //   at org.apache.commons.vfs2.provider.AbstractFileObject.findFiles

    // test methods to get the child:
    final FileObject[] findFilesResult = dir.findFiles(new AllFileSelector()); // includes dir
    final FileObject[] getChildrenResult = dir.getChildren();
    final FileObject getChildResult = dir.getChild(testFileName);

    // validate findFiles returns expected result
    assertEquals("Unexpected result findFiles: " + Arrays.toString(findFilesResult), 2, findFilesResult.length);
    String resultName = findFilesResult[0].getName().getPathDecoded();
    assertEquals("findFiles Child name does not match", expectedName, resultName);
    assertEquals("Did findFiles but child was no file", FileType.FILE, findFilesResult[0].getType());

    // validate getChildren returns expected result
    assertEquals("Unexpected result getChildren: " + Arrays.toString(getChildrenResult), 1, getChildrenResult.length);
    resultName = getChildrenResult[0].getName().getPathDecoded();
    assertEquals("getChildren Child name does not match", expectedName, resultName);
    assertEquals("Did getChildren but child was no file", FileType.FILE, getChildrenResult[0].getType());

    // validate getChild returns expected child
    assertNotNull("Did not find direct child", getChildResult);
    resultName = getChildResult.getName().getPathDecoded();
    assertEquals("getChild name does not match", expectedName, resultName);
    assertEquals("getChild was no file", FileType.FILE, getChildResult.getType());
}
 
Example #17
Source File: DistributedCacheUtilImplTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void findFiles_vfs_hdfs() throws Exception {

  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  URL url = new URL( "http://localhost:8020/path/to/file" );
  Configuration conf = mock( Configuration.class );
  FileSystem fs = mock( FileSystem.class );
  FileObject source = mock( FileObject.class );
  Path dest = mock( Path.class );
  FileObject hdfsDest = mock( FileObject.class );
  Path root = mock( Path.class );

  FileObject[] fileObjects = new FileObject[ 12 ];
  for ( int i = 0; i < fileObjects.length; i++ ) {
    URL fileUrl = new URL( "http://localhost:8020/path/to/file/" + i );
    FileObject fileObject = mock( FileObject.class );
    fileObjects[ i ] = fileObject;
    doReturn( fileUrl ).when( fileObject ).getURL();
  }

  doReturn( url ).when( source ).getURL();
  doReturn( conf ).when( fs ).getConf();
  doReturn( 0 ).when( conf ).getInt( any( String.class ), anyInt() );
  doReturn( true ).when( source ).exists();
  doReturn( fileObjects ).when( hdfsDest ).findFiles( any( FileSelector.class ) );
  doReturn( true ).when( fs ).delete( root, true );
  doReturn( fileObjects.length ).when( source ).delete( any( AllFileSelector.class ) );
  doNothing().when( fs ).copyFromLocalFile( any( Path.class ), any( Path.class ) );
  doNothing().when( fs ).setPermission( any( Path.class ), any( FsPermission.class ) );
  doReturn( true ).when( fs ).setReplication( any( Path.class ), anyShort() );

  try {
    try {
      ch.stageForCache( source, fs, dest, true );

      List<String> files = ch.findFiles( hdfsDest, null );
      assertEquals( 12, files.size() );
    } finally {
      fs.delete( root, true );
    }
  } finally {
    source.delete( new AllFileSelector() );
  }
}
 
Example #18
Source File: DistributedCacheUtilImplOSDependentTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void configureWithPmr() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  Configuration conf = new Configuration();
  FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem( conf );

  // This "empty pmr" contains a lib/ folder and some empty kettle-*.jar files but no actual content
  FileObject pmrArchive = KettleVFS.getFileObject( getClass().getResource( "/empty-pmr.zip" ).toURI().getPath() );

  FileObject bigDataPluginDir = DistributedCacheTestUtil
    .createTestFolderWithContent( DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME );

  Path root = new Path( "bin/test/installKettleEnvironment" );
  try {
    ch.installKettleEnvironment( pmrArchive, fs, root, bigDataPluginDir, null, "test-config" );
    assertTrue( ch.isKettleEnvironmentInstalledAt( fs, root ) );

    ch.configureWithKettleEnvironment( conf, fs, root );

    // Make sure our libraries are on the classpath
    assertTrue( conf.get( "mapred.cache.files" ).contains( "lib/kettle-core.jar" ) );
    assertTrue( conf.get( "mapred.cache.files" ).contains( "lib/kettle-engine.jar" ) );
    assertTrue( conf.get( "mapred.job.classpath.files" ).contains( "lib/kettle-core.jar" ) );
    assertTrue( conf.get( "mapred.job.classpath.files" ).contains( "lib/kettle-engine.jar" ) );

    // Make sure the configuration specific jar made it!
    assertTrue( conf.get( "mapred.cache.files" ).contains( "lib/configuration-specific.jar" ) );

    // Make sure our plugins folder is registered
    assertTrue( conf.get( "mapred.cache.files" ).contains( "#plugins" ) );

    // Make sure our libraries aren't included twice
    assertFalse( conf.get( "mapred.cache.files" ).contains( "#lib" ) );

    // We should not have individual files registered
    assertFalse( conf.get( "mapred.cache.files" ).contains( "pentaho-big-data-plugin/jar1.jar" ) );
    assertFalse( conf.get( "mapred.cache.files" ).contains( "pentaho-big-data-plugin/jar2.jar" ) );
    assertFalse( conf.get( "mapred.cache.files" ).contains( "pentaho-big-data-plugin/folder/file.txt" ) );

  } finally {
    bigDataPluginDir.delete( new AllFileSelector() );
    fs.delete( root, true );
  }
}
 
Example #19
Source File: ResourceUtils.java    From spoofax with Apache License 2.0
public static Iterable<FileObject> find(FileObject base) throws FileSystemException {
    return find(base, new AllFileSelector());
}
 
Example #20
Source File: DistributedCacheUtilImpl.java    From pentaho-hadoop-shims with Apache License 2.0
/**
 * Delete a directory and all of its contents
 *
 * @param dir Directory to delete
 * @return true if the directory no longer exists after the delete
 * @throws FileSystemException if the delete operation fails
 */
public boolean deleteDirectory( FileObject dir ) throws FileSystemException {
  dir.delete( new AllFileSelector() );
  return !dir.exists();
}
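
A short usage sketch for this helper. The caller below is hypothetical, and archive stands in for a FileObject pointing at a zip or jar, as in Example #7; because AllFileSelector includes the base folder, the delete removes the directory itself along with all descendants, which is what the !dir.exists() check verifies.

DistributedCacheUtilImpl util = new DistributedCacheUtilImpl();
FileObject extracted = util.extractToTemp( archive ); // archive: hypothetical FileObject for a zip/jar
try {
  // ... work with the extracted files ...
} finally {
  if ( !util.deleteDirectory( extracted ) ) {
    // deletion was incomplete; the directory still exists
  }
}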