Java Code Examples for org.apache.commons.io.FileUtils#iterateFiles()

The following examples show how to use org.apache.commons.io.FileUtils#iterateFiles(). Each example is taken from an open-source project; the source file, project, and license are listed above it.
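For reference, the overload used in most of the examples below is iterateFiles(File directory, String[] extensions, boolean recursive): a null extensions array matches every file, directories themselves are never returned, and the boolean controls whether subdirectories are traversed. A minimal stand-alone sketch (the directory path and extension list are illustrative, not taken from any of the projects below):

import java.io.File;
import java.util.Iterator;
import org.apache.commons.io.FileUtils;

public class IterateFilesExample {
    public static void main(String[] args) {
        // Illustrative directory; replace with a directory that exists on your system.
        File dir = new File("/tmp/data");
        // Return only files ending in .txt or .csv, descending into subdirectories.
        Iterator<File> it = FileUtils.iterateFiles(dir, new String[] {"txt", "csv"}, true);
        while (it.hasNext()) {
            System.out.println(it.next().getAbsolutePath());
        }
    }
}
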
Example 1
Source File: FileBitPatternInfoReader.java    From ghidra with Apache License 2.0
/**
 * Constructor used for testing
 * @param xmlDir directory containing the XML data files to process
 */
FileBitPatternInfoReader(File xmlDir) {
	if (!xmlDir.isDirectory()) {
		throw new IllegalArgumentException(xmlDir.getName() + " is not a directory");
	}

	startingAddresses = new ArrayList<Long>();
	fInfoList = new ArrayList<FunctionBitPatternInfo>();
	registerExtent = new ContextRegisterExtent();
	params = null;

	Iterator<File> dataFiles = FileUtils.iterateFiles(xmlDir, null, false);
	while (dataFiles.hasNext()) {
		File dataFile = dataFiles.next();
		processXmlFile(dataFile);
	}
}
 
Example 2
Source File: GemFireXDDataExtractorJUnit.java    From gemfirexd-oss with Apache License 2.0
public void DISABLEtestCSVExportSingleNodeReplicateOplogKrfCorrupt() throws Exception {
  String tableName = "REPLICATED_TABLE";
  this.createDiskStore(testDiskStoreName);
  this.createPersistentReplicateTable(tableName, testDiskStoreName);
  this.insertData(tableName, 0, 10000);
  this.updateData(tableName, 0, 300);
  this.deleteData(tableName, 300, 600);
  
  copyDataDictionary();
  copyOplogs();
  
  Iterator<File> oplogIterator = FileUtils.iterateFiles(oplogCopy, new String[]{"krf"}, true);
  while (oplogIterator.hasNext()) {
    corruptFile(500, (File)oplogIterator.next());
  }
  GemFireXDDataExtractorImpl extractor = new GemFireXDDataExtractorImpl();
  extractor.createTestConnection();
  extractor.retrieveAllRowFormatters();
  
  List<String> diskStoreList = new ArrayList<String>();
  diskStoreList.add(oplogCopy.getCanonicalPath());
  List<GFXDSnapshotExportStat> stats = extractor.extractDiskStores(testDiskStoreName, diskStoreList, outputDir.getCanonicalPath());
  GFXDSnapshotExportStat stat = stats.get(0);
  assertEquals(1, stats.size());
  assertTrue(stat.isCorrupt());
}
 
Example 3
Source File: MetaCodeGenService.java    From youran with Apache License 2.0
/**
 * Compare the files under the source directory with those under the target directory, overwriting any that are missing or differ
 *
 * @param sourceDir the source directory
 * @param targetDir the target directory
 * @throws IOException
 */
private void compareAndCoverFile(File sourceDir, File targetDir) throws IOException {
    String sourcePath = sourceDir.getPath();
    String targetPath = targetDir.getPath();
    LOGGER.debug("sourcePath={}", sourcePath);
    LOGGER.debug("targetPath={}", targetPath);
    Iterator<File> fileIterator = FileUtils.iterateFiles(sourceDir, null, true);
    while (fileIterator.hasNext()) {
        File file = fileIterator.next();
        String path = file.getPath();
        String relativePath = path.substring(sourcePath.length());
        File targetFile = new File(targetPath + relativePath);
        if (!targetFile.exists()) {
            LOGGER.debug("目标文件不存在={}", targetPath + relativePath);
            this.doCover(targetFile, file);
        } else if (!compareFile(targetFile, file)) {
            LOGGER.debug("文件内容不相等={}", targetPath + relativePath);
            this.doCover(targetFile, file);
        }
    }

}
 
Example 4
Source File: GemFireXDDataExtractorJUnit.java    From gemfirexd-oss with Apache License 2.0
public void testCSVExportSingleNodeReplicateOplogCrfDelete() throws Exception {
  String tableName = "REPLICATED_TABLE";
  this.createDiskStore(testDiskStoreName);
  this.createPersistentReplicateTable(tableName, testDiskStoreName);
  this.insertData(tableName, 0, 10000);
  this.updateData(tableName, 0, 300);
  this.deleteData(tableName, 300, 600);
  
  copyDataDictionary();
  copyOplogs();
  
  Iterator<File> oplogIterator = FileUtils.iterateFiles(oplogCopy, new String[]{"crf"}, true);
  while (oplogIterator.hasNext()) {
    this.deleteFile((File)oplogIterator.next());
  }
  
  GemFireXDDataExtractorImpl extractor = new GemFireXDDataExtractorImpl();
  extractor.createTestConnection();
  extractor.retrieveAllRowFormatters();
  List<String> diskStoreList = new ArrayList<String>();
  diskStoreList.add(oplogCopy.getCanonicalPath());
  List<GFXDSnapshotExportStat> stats = extractor.extractDiskStores(testDiskStoreName, diskStoreList, outputDir.getCanonicalPath());
  assertEquals(0, stats.size());
}
 
Example 5
Source File: HadoopClientServicesImpl.java    From pentaho-hadoop-shims with Apache License 2.0
private String getSqoopJarLocation( Configuration c ) {

    StringBuilder sb = new StringBuilder();

    for ( String bundleFileLocation : sqoopBundleFileLocations ) {
      File filesInsideBundle = new File( bundleFileLocation );
      Iterator<File> filesIterator = FileUtils.iterateFiles( filesInsideBundle, new String[] { "jar" }, true );

      while ( filesIterator.hasNext() ) {
        File file = filesIterator.next();
        String name = file.getName();
        if ( name.startsWith( "sqoop" ) ) {
          sb.append( file.getAbsolutePath() );
        }
      }
    }

    try {
      FileSystem fs = FileSystem.getLocal( ShimUtils.asConfiguration( c ) );
      return new Path( sb.toString() ).makeQualified( fs ).toString();
    } catch ( IOException e ) {
      e.printStackTrace();
    }
    return sb.toString();
  }
 
Example 6
Source File: IOHandler.java    From pdf-unstamper with GNU General Public License v3.0
public static Iterator<File> getCopiedFiles(
        @NotNull String idn,
        @NotNull String odn,
        @NotNull boolean recursive)
        throws IOException, FileNameDuplicateException {
    File dirI = new File(idn);
    File dirO = new File(odn);

    if (dirI.getCanonicalPath().equals(dirO.getCanonicalPath())) {
        throw new FileNameDuplicateException();
    } else if (dirI.exists() && dirI.isDirectory()) {
        FileUtils.copyDirectory(dirI, dirO);
        return FileUtils.iterateFiles(dirO, new String[]{"pdf"}, recursive);
    } else
        return null;
}
 
Example 7
Source File: Paths.java    From nd4j with Apache License 2.0
/**
 * Check if a file exists in the path
 * @param name the name of the file
 * @return true if the name exists, false otherwise
 */
public static boolean nameExistsInPath(String name) {
    String path = System.getenv(PATH_ENV_VARIABLE);
    String[] dirs = path.split(File.pathSeparator);
    for (String dir : dirs) {
        File dirFile = new File(dir);
        if (!dirFile.exists())
            continue;

        if (dirFile.isFile() && dirFile.getName().equals(name))
            return true;
        else {
            Iterator<File> files = FileUtils.iterateFiles(dirFile, null, false);
            while (files.hasNext()) {
                File curr = files.next();
                if (curr.getName().equals(name))
                    return true;
            }

        }
    }

    return false;
}
 
Example 8
Source File: RunnerUtils.java    From wisdom with Apache License 2.0
/**
 * Checks whether a file whose name matches the application currently under test is contained in the given
 * directory. This method follows the default Maven naming convention: the final file is expected to have a name
 * of the form <code>artifactId-version.jar</code>. If the version ends with <code>-SNAPSHOT</code>,
 * it just checks for <code>artifactId-stripped_version</code>, where the stripped version is the version without
 * the <code>SNAPSHOT</code> part.
 * <p>
 * The artifactId and version are read from the <code>target/osgi/osgi.properties</code> file,
 * that should have been written by the Wisdom build process.
 *
 * @param directory the directory
 * @return the bundle file if found
 * @throws java.io.IOException if something bad happens.
 */
public static File detectApplicationBundleIfExist(File directory) throws IOException {
    Properties properties = getMavenProperties();
    if (properties == null || directory == null || !directory.isDirectory()) {
        return null;
    }

    final String artifactId = properties.getProperty("project.artifactId");
    final String groupId = properties.getProperty("project.groupId");
    final String bsn = getBundleSymbolicName(groupId, artifactId);
    String version = properties.getProperty("project.version");
    final String strippedVersion;
    if (version.endsWith("-SNAPSHOT")) {
        strippedVersion = version.substring(0, version.length() - "-SNAPSHOT".length());
    } else {
        strippedVersion = version;
    }

    Iterator<File> files = FileUtils.iterateFiles(directory, new AbstractFileFilter() {
        @Override
        public boolean accept(File file) {
            return file.isFile()
                    && file.getName().startsWith(bsn + "-" + strippedVersion)
                    && file.getName().endsWith(".jar");
        }
    }, TrueFileFilter.INSTANCE);

    if (files.hasNext()) {
        return files.next();
    }
    return null;
}
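
The example above uses the filter-based overload, iterateFiles(File directory, IOFileFilter fileFilter, IOFileFilter dirFilter): the first filter selects which files are returned, and the second decides which subdirectories are entered (TrueFileFilter.INSTANCE recurses everywhere; passing null disables recursion). A minimal sketch of the same overload, with an illustrative directory and suffix rather than the project's own filter:

import java.io.File;
import java.util.Iterator;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.SuffixFileFilter;
import org.apache.commons.io.filefilter.TrueFileFilter;

public class FilterOverloadExample {
    public static void main(String[] args) {
        // Illustrative directory; SuffixFileFilter matches file names by their ending.
        Iterator<File> jars = FileUtils.iterateFiles(
                new File("/opt/libs"),
                new SuffixFileFilter(".jar"),
                TrueFileFilter.INSTANCE); // recurse into every subdirectory
        while (jars.hasNext()) {
            System.out.println(jars.next().getName());
        }
    }
}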
 
Example 9
Source File: JsonToYamlFileDuplicator.java    From swagger-parser with Apache License 2.0
public static void duplicateFilesInYamlFormat(String inputDirectoryStr, String outputDirectoryStr) {
    Path outputDirectory = Paths.get(outputDirectoryStr);
    Path inputDirectory = Paths.get(inputDirectoryStr);

    deleteAndRecreateOutputDirectory(outputDirectory);

    final Iterator<File> fileIterator = FileUtils.iterateFiles(inputDirectory.toFile(), new String[]{"json"}, true);
    while (fileIterator.hasNext()) {
        File next = fileIterator.next();
        System.out.println("Processing " + next);

        processFile(next, inputDirectory, outputDirectory);
    }

}
 
Example 10
Source File: GemFireXDDataExtractorJUnit.java    From gemfirexd-oss with Apache License 2.0
public void testCSVExportSingleNodeReplicateOplogCrfCorrupt() throws Exception {
  String tableName = "REPLICATED_TABLE";
  this.createDiskStore(testDiskStoreName);
  this.createPersistentReplicateTable(tableName, testDiskStoreName);
  this.insertData(tableName, 0, 10000);
  this.updateData(tableName, 0, 300);
  this.deleteData(tableName, 300, 600);
  
  copyDataDictionary();
  copyOplogs();

  Iterator<File> oplogIterator = FileUtils.iterateFiles(oplogCopy, new String[]{"crf"}, true);
  while (oplogIterator.hasNext()) {
    corruptFile(500, (File)oplogIterator.next());
  }
      
  GemFireXDDataExtractorImpl extractor = new GemFireXDDataExtractorImpl();
  extractor.createTestConnection();
  extractor.retrieveAllRowFormatters();
  
  List<String> diskStoreList = new ArrayList<String>();
  diskStoreList.add(oplogCopy.getCanonicalPath());
  List<GFXDSnapshotExportStat> stats = extractor.extractDiskStores(testDiskStoreName, diskStoreList, outputDir.getCanonicalPath());
  GFXDSnapshotExportStat stat = stats.get(0);
  assertEquals(1, stats.size());
  assertTrue(stat.isCorrupt());
}
 
Example 11
Source File: FileSentenceIterator.java    From deeplearning4j with Apache License 2.0
/**
 * Takes a single file or directory
 *
 * @param preProcessor the sentence pre processor
 * @param file         the file or folder to iterate over
 */
public FileSentenceIterator(SentencePreProcessor preProcessor, File file) {
    super(preProcessor);
    this.file = file;
    cache = new java.util.concurrent.ConcurrentLinkedDeque<>();
    if (file.isDirectory())
        fileIterator = FileUtils.iterateFiles(file, null, true);
    else
        fileIterator = Arrays.asList(file).iterator();
}
 
Example 12
Source File: IOHandler.java    From pdf-unstamper with GNU General Public License v3.0
public static Iterator<File> getFiles(
        @NotNull String idn,
        @NotNull boolean recursive) {
    File dirI = new File(idn);
    if (dirI.exists() && dirI.isDirectory()) {
        return FileUtils.iterateFiles(dirI, new String[]{"pdf"}, recursive);
    } else
        return null;
}
 
Example 13
Source File: MapleQuestItemFetcher.java    From HeavenMS with GNU Affero General Public License v3.0
private static void filterDirectorySearchMatchingData(String path, List<Pair<Integer, Integer>> itemsWithQuest) {
    Iterator iter = FileUtils.iterateFiles(new File(directoryName + "/" + path), new String[]{"sql", "js", "txt","java"}, true);

    while(iter.hasNext()) {
        File file = (File) iter.next();
        fileSearchMatchingData(file, itemsWithQuest);
    }
}
 
Example 14
Source File: UpdateManager.java    From offspring with MIT License
private Map<File, Long> generateChecksumMap(File directory)
    throws IOException {
  Map<File, Long> map = new HashMap<File, Long>();
  Iterator<File> files = FileUtils.iterateFiles(directory, null, true);
  while (files != null && files.hasNext()) {
    File file = files.next();
    long checksum = FileUtils.checksumCRC32(file);
    map.put(file, checksum);
  }
  return map;
}
 
Example 15
Source File: FileRecordReader.java    From DataVec with Apache License 2.0
protected void advanceToNextLocation () {
    //File file;
    String path = locationsIterator.next(); // should always have file:// preceding
    if(!path.startsWith("file:")){
        path = "file:///" + path;
    }
    if(path.contains("\\")){
        path = path.replaceAll("\\\\","/");
    }
    File file = new File(URI.create(path));
    if (file.isDirectory())
        iter = FileUtils.iterateFiles(file, null, true);
    else
        iter = Collections.singletonList(file).iterator();
}
 
Example 16
Source File: HadoopClientServicesImpl.java    From pentaho-hadoop-shims with Apache License 2.0
private void addDependencyJars( Configuration conf, Class... classes )
  throws IOException {
  List<String> classNames = new ArrayList<>();
  for ( Class clazz : classes ) {
    classNames.add( clazz.getCanonicalName().replace( ".", "/" ) + ".class" );
  }
  Set<String> tmpjars = new HashSet<>();
  if ( conf.get( TMPJARS ) != null ) {
    tmpjars.addAll( Arrays.asList( conf.get( TMPJARS ).split( "," ) ) );
  }

  for ( String bundleFileLocation : sqoopBundleFileLocations ) {
    File filesInsideBundle = new File( bundleFileLocation );
    Iterator<File> filesIterator = FileUtils.iterateFiles( filesInsideBundle, new String[] { "jar" }, true );

    getOut:
    while ( filesIterator.hasNext() ) {
      File file = filesIterator.next();

      // Process the jar file.

      try ( ZipFile zip = new ZipFile( file ) ) {
        // Loop through the jar entries and print the name of each one.

        for ( Enumeration list = zip.entries(); list.hasMoreElements(); ) {
          ZipEntry entry = (ZipEntry) list.nextElement();
          if ( !entry.isDirectory() && entry.getName().endsWith( ".class" ) ) {
            ListIterator<String> classNameIterator = classNames.listIterator();
            while ( classNameIterator.hasNext() ) {
              if ( entry.getName().endsWith( classNameIterator.next() ) ) {
                // If here we found a class in this jar, add the jar to the list, and delete the class from
                // classNames.
                tmpjars.add( file.toURI().toURL().toString() );
                classNameIterator.remove();
                if ( classNames.size() == 0 ) {
                  break getOut;
                }
              }
            }
          }
        }
      }
    }
  }

  StringBuilder sb = new StringBuilder();
  if ( tmpjars.size() > 0 ) {
    for ( String jarPath : tmpjars ) {
      sb.append( "," ).append( jarPath );
    }
    conf.set( TMPJARS, sb.toString().substring( 1 ) );
  }
}
 
Example 17
Source File: Word2VecDataFetcher.java    From deeplearning4j with Apache License 2.0
@Override
public void reset() {
    files = FileUtils.iterateFiles(new File(path), null, true);
    cache.clear();

}
 
Example 18
Source File: MSLibBatchImportWorker.java    From ghidra with Apache License 2.0
@Override
protected void run() throws Exception {
	// If running this via 'HeadlessAnalyzer', a MSLibBatchImportWorker.properties 
	// file needs to be created with a single line specifying the queue directory:
	// "Choose queue directory Choose=/path/to/queue/dir"
	// (without any quotes)
	// Which corresponds to the askDirectory() prompt in the next line:
	File directory = askDirectory("Choose queue directory", "Choose");
	// or the value could be hard-coded:
	// File directory = new File("/path/to/queue/dir");

	File newDir = new File(directory, "new");
	File workDir = new File(directory, "work");
	File doneDir = new File(directory, "done");
	newDir.mkdir();
	workDir.mkdir();
	doneDir.mkdir();

	int totalFilesProcessed = 0;
	long lastWorkTS = System.currentTimeMillis();
	int maxIdleMS = 10 * 1000;

	while ((System.currentTimeMillis() - lastWorkTS) < maxIdleMS) {
		if (monitor.isCancelled()) {
			break;
		}

		int filesProcessed = 0;

		Iterator<File> files = FileUtils.iterateFiles(newDir, null, false);
		while (files.hasNext()) {

			File newFile = files.next();

			File workFile = new File(workDir, ".work_" + pid + "_" + newFile.getName());
			if (!newFile.renameTo(workFile)) {
				continue;
			}
			workFile.setLastModified(System.currentTimeMillis());

			List<String> lines = FileUtilities.getLines(workFile);
			if (lines.size() != 2) {
				println("Found bad file: " + workFile);
				continue;
			}
			String importFilePath = lines.get(0);
			String destFolderPath = lines.get(1);

			File importFile = new File(importFilePath);
			if (!importFile.exists()) {
				println("Can not find import file: " + importFile);
				continue;
			}
			DomainFolder destFolder = getFolder(
				state.getProject().getProjectData().getRootFolder(), destFolderPath, true);
			MessageLog log = new MessageLog();
			importLibrary(destFolder, importFile, log);

			File doneFile = new File(doneDir, newFile.getName());
			if (!workFile.renameTo(doneFile)) {
				throw new IOException("Failed to move " + workFile + " to " + doneFile);
			}
			doneFile.setLastModified(System.currentTimeMillis());

			filesProcessed++;
			totalFilesProcessed++;
		}
		lastWorkTS = (filesProcessed != 0) ? System.currentTimeMillis() : lastWorkTS;

		Thread.sleep(500);
	}
	println("Exiting import wait loop, total files processed: " + totalFilesProcessed);
}