Java Code Examples for org.apache.commons.vfs2.FileObject#createFolder()

The following examples show how to use org.apache.commons.vfs2.FileObject#createFolder(). They are drawn from open-source projects; the source file, project, and license are listed above each example.
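Before the project examples, here is a minimal, self-contained sketch of the call itself, using the default VFS file system manager. The local path is hypothetical and only serves to show that createFolder() also creates any missing ancestor folders and does nothing if the folder already exists.

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class CreateFolderSketch {
    public static void main(String[] args) throws FileSystemException {
        // Obtain the default file system manager.
        FileSystemManager manager = VFS.getManager();

        // Resolve a folder location (hypothetical local path).
        FileObject folder = manager.resolveFile("file:///tmp/vfs-example/nested/dir");

        // Creates this folder and any missing ancestors; a no-op if it already exists.
        folder.createFolder();
        System.out.println("Folder exists: " + folder.exists());

        // Release any resources held by the file object.
        folder.close();
    }
}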
Example 1
Source File: HopVfsFileDialog.java    From hop with Apache License 2.0
@GuiToolbarElement(
  root = BROWSER_TOOLBAR_PARENT_ID,
  id = BROWSER_ITEM_ID_CREATE_FOLDER,
  toolTip = "Create folder",
  image = "ui/images/Add.svg"
)
public void createFolder() {
  String folder = "";
  EnterStringDialog dialog = new EnterStringDialog( shell, folder, "Create folder", "Please enter the name of the folder to create in: " + activeFolder );
  folder = dialog.open();
  if ( folder != null ) {
    String newPath = activeFolder.toString();
    if ( !newPath.endsWith( "/" ) && !newPath.endsWith( "\\" ) ) {
      newPath += "/";
    }
    newPath += folder;
    try {
      FileObject newFolder = HopVfs.getFileObject( newPath );
      newFolder.createFolder();
      refreshBrowser();
    } catch ( Throwable e ) {
      showError( "Error creating folder '" + newPath + "'", e );
    }
  }
}
 
Example 2
Source File: ResourceService.java    From spoofax with Apache License 2.0
@Override public File localFile(FileObject resource, FileObject dir) {
    if(resource instanceof LocalFile) {
        return FileUtils.toFile(resource);
    }

    final File localDir = localPath(dir);
    if(localDir == null) {
        throw new MetaborgRuntimeException("Replication directory " + dir
            + " is not on the local filesystem, cannot get local file for " + resource);
    }
    try {
        dir.createFolder();

        final FileObject copyLoc;
        if(resource.getType() == FileType.FOLDER) {
            copyLoc = dir;
        } else {
            copyLoc = ResourceUtils.resolveFile(dir, resource.getName().getBaseName());
        }
        copyLoc.copyFrom(resource, new AllFileSelector());

        return localDir;
    } catch(FileSystemException e) {
        throw new MetaborgRuntimeException("Could not get local file for " + resource, e);
    }
}
 
Example 3
Source File: FTPRemoteLocationExecutorDelegate.java    From datacollector with Apache License 2.0
@Override
public boolean move(String sourceFile, String targetDir, boolean overwriteFile) throws IOException {
  FileObject targetFolder = resolveChild(targetDir);
  targetFolder.createFolder();

  String targetFileName = StringUtils.appendIfMissing(targetFolder.getName().getPath(), "/")
      + StringUtils.substringAfterLast(sourceFile, "/");

  FileObject targetFile = targetFolder.resolveFile(targetFileName, NameScope.DESCENDENT);

  if (!overwriteFile && targetFile.exists()) {
    return false;
  }

  resolveChild(sourceFile).moveTo(targetFile);
  targetFile.close();


  return true;
}
 
Example 4
Source File: TextFileOutput.java    From hop with Apache License 2.0
private void createParentFolder( String filename ) throws Exception {
  // Check for parent folder
  FileObject parentfolder = null;
  try {
    // Get parent folder
    parentfolder = getFileObject( filename, getPipelineMeta() ).getParent();
    if ( parentfolder.exists() ) {
      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "TextFileOutput.Log.ParentFolderExist",
          HopVfs.getFriendlyURI( parentfolder ) ) );
      }
    } else {
      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "TextFileOutput.Log.ParentFolderNotExist",
          HopVfs.getFriendlyURI( parentfolder ) ) );
      }
      if ( meta.isCreateParentFolder() ) {
        parentfolder.createFolder();
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "TextFileOutput.Log.ParentFolderCreated",
            HopVfs.getFriendlyURI( parentfolder ) ) );
        }
      } else {
        throw new HopException( BaseMessages.getString( PKG, "TextFileOutput.Log.ParentFolderNotExistCreateIt",
          HopVfs.getFriendlyURI( parentfolder ), HopVfs.getFriendlyURI( filename ) ) );
      }
    }
  } finally {
    if ( parentfolder != null ) {
      try {
        parentfolder.close();
      } catch ( Exception ex ) {
        // Ignore
      }
    }
  }
}
 
Example 5
Source File: ResourceAgent.java    From spoofax with Apache License 2.0
@Override public boolean mkdir(String dn) {
    try {
        final FileObject resource = resourceService.resolve(workingDir, dn);
        final boolean created = !resource.exists();
        resource.createFolder();
        return created;
    } catch(FileSystemException e) {
        throw new RuntimeException("Could not create directories", e);
    }
}
 
Example 6
Source File: JsonOutput.java    From hop with Apache License 2.0
private void createParentFolder( String filename ) throws HopTransformException {
  if ( !meta.isCreateParentFolder() ) {
    return;
  }
  // Check for parent folder
  FileObject parentfolder = null;
  try {
    // Get parent folder
    parentfolder = HopVfs.getFileObject( filename ).getParent();
    if ( !parentfolder.exists() ) {
      if ( log.isDebug() ) {
        logDebug( BaseMessages.getString( PKG, "JsonOutput.Error.ParentFolderNotExist", parentfolder.getName() ) );
      }
      parentfolder.createFolder();
      if ( log.isDebug() ) {
        logDebug( BaseMessages.getString( PKG, "JsonOutput.Log.ParentFolderCreated" ) );
      }
    }
  } catch ( Exception e ) {
    throw new HopTransformException( BaseMessages.getString(
      PKG, "JsonOutput.Error.ErrorCreatingParentFolder", parentfolder.getName() ) );
  } finally {
    if ( parentfolder != null ) {
      try {
        parentfolder.close();
      } catch ( Exception ex ) { /* Ignore */
      }
    }
  }
}
 
Example 7
Source File: JobEntrySSH2GET.java    From pentaho-kettle with Apache License 2.0
private boolean CreateFolder( String filefolder ) {
  FileObject folder = null;
  try {
    folder = KettleVFS.getFileObject( filefolder, this );

    if ( !folder.exists() ) {
      if ( createtargetfolder ) {
        folder.createFolder();
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.FolderCreated", folder.toString() ) );
        }
      } else {
        return false;
      }

    }
    return true;
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.CanNotCreateFolder", folder.toString() ), e );

  } finally {
    if ( folder != null ) {
      try {
        folder.close();
      } catch ( Exception ex ) { /* Ignore */
      }
    }
  }
  return false;
}
 
Example 8
Source File: ProviderRenameTests.java    From commons-vfs with Apache License 2.0
/**
 * Moves a file from a child folder to a parent folder to test what happens when the original folder is now empty.
 *
 * See [VFS-298] FTP: Exception is thrown when renaming a file.
 */
public void testRenameFileAndLeaveFolderEmpty() throws Exception {
    final FileObject scratchFolder = createScratchFolder();
    final FileObject folder = scratchFolder.resolveFile("folder");
    folder.createFolder();
    assertTrue(folder.exists());
    final FileObject file = folder.resolveFile("file1.txt");
    assertFalse(file.exists());

    final String content = createTestFile(file);

    // Make sure we can move the new file to another file on the same file system
    moveFile(scratchFolder, file, content);
    assertEquals(0, folder.getChildren().length);
}
 
Example 9
Source File: RepositoryTreeDialog.java    From pentaho-reporting with GNU Lesser General Public License v2.1
/**
 * Invoked when an action occurs.
 */
public void actionPerformed( final ActionEvent e ) {
  final CreateNewRepositoryFolderDialog newFolderDialog =
      new CreateNewRepositoryFolderDialog( RepositoryTreeDialog.this );

  if ( !newFolderDialog.performEdit() ) {
    return;
  }

  final TreePath selectionPath = repositoryBrowser.getSelectionPath();
  if ( selectionPath == null ) {
    return;
  }

  final FileObject treeNode = (FileObject) selectionPath.getLastPathComponent();
  if ( !StringUtils.isEmpty( newFolderDialog.getName() ) ) {
    final Component glassPane = SwingUtilities.getRootPane( RepositoryTreeDialog.this ).getGlassPane();
    try {
      glassPane.setVisible( true );
      glassPane.setCursor( new Cursor( Cursor.WAIT_CURSOR ) );
      final FileObject child = treeNode.resolveFile( newFolderDialog.getFolderName() );
      if ( child instanceof WebSolutionFileObject ) {
        final WebSolutionFileObject webSolutionFileObject = (WebSolutionFileObject) child;
        webSolutionFileObject.setDescription( newFolderDialog.getDescription() );
      }
      child.createFolder();
      repositoryTreeModel.fireTreeDataChanged();
      repositoryBrowser.setSelectionPath( selectionPath.getParentPath().pathByAddingChild( child ) );
      setDirty( true );
    } catch ( Exception e1 ) {
      UncaughtExceptionsModel.getInstance().addException( e1 );
    } finally {
      glassPane.setVisible( false );
      glassPane.setCursor( new Cursor( Cursor.DEFAULT_CURSOR ) );
    }
  }
}
 
Example 10
Source File: ProviderRandomReadWriteTests.java    From commons-vfs with Apache License 2.0
/**
 * Sets up a scratch folder for the test to use.
 */
protected FileObject createScratchFolder() throws Exception {
    final FileObject scratchFolder = getWriteFolder();

    // Make sure the test folder is empty
    scratchFolder.delete(Selectors.EXCLUDE_SELF);
    scratchFolder.createFolder();

    return scratchFolder;
}
 
Example 11
Source File: DataSpaceNodeConfigurationAgent.java    From scheduling with GNU Affero General Public License v3.0
@Override
public void run() {
    try {
        long invalidationPeriod = getCacheInvalidationPeriod();
        long currentTime = System.currentTimeMillis();
        // lock the timer in write mode, this will prevent any Task to start during the cleaning process
        if (cacheCleaningRWLock.writeLock().tryLock()) {
            try {
                FileObject rootFO = fileSystemManager.resolveFile(rootCacheUri);
                if (!rootFO.exists()) {
                    rootFO.createFolder();
                }
                FileObject[] files = rootFO.findFiles(Selectors.EXCLUDE_SELF);
                if (files != null) {
                    for (FileObject file : files) {
                        if (currentTime - file.getContent().getLastModifiedTime() > invalidationPeriod) {
                            logger.info("[Cache Space cleaner] deleting " + file);
                            file.delete();
                        }
                    }
                }
            } finally {
                cacheCleaningRWLock.writeLock().unlock();
            }
        }
    } catch (Exception e) {
        logger.error("Error when cleaning files in cache", e);
    }
}
 
Example 12
Source File: AbstractFileObject.java    From commons-vfs with Apache License 2.0
/**
 * Prepares this file for writing. Makes sure it is either a file, or its parent folder exists. Returns an output
 * stream to use to write the content of the file to.
 *
 * @param bAppend true when append to the file.
 *            Note: If the underlying file system does not support appending, a FileSystemException is thrown.
 * @return An OutputStream where the new contents of the file can be written.
 * @throws FileSystemException if an error occurs; for example:
 *             bAppend is true, and the underlying FileSystem does not support it
 */
public OutputStream getOutputStream(final boolean bAppend) throws FileSystemException {
    /*
     * VFS-210 if (getType() != FileType.IMAGINARY && !getType().hasContent()) { throw new
     * FileSystemException("vfs.provider/write-not-file.error", name); } if (!isWriteable()) { throw new
     * FileSystemException("vfs.provider/write-read-only.error", name); }
     */

    if (bAppend && !fileSystem.hasCapability(Capability.APPEND_CONTENT)) {
        throw new FileSystemException("vfs.provider/write-append-not-supported.error", fileName);
    }

    if (getType() == FileType.IMAGINARY) {
        // Does not exist - make sure parent does
        final FileObject parent = getParent();
        if (parent != null) {
            parent.createFolder();
        }
    }

    // Get the raw output stream
    try {
        return doGetOutputStream(bAppend);
    } catch (final RuntimeException re) {
        throw re;
    } catch (final Exception exc) {
        throw new FileSystemException("vfs.provider/write.error", exc, fileName);
    }
}
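
As a brief complement to the getOutputStream() contract documented above, the sketch below writes to a file whose parent folder does not exist yet; the parent.createFolder() branch from Example 12 is what makes this work. The path is hypothetical.

import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class WriteWithMissingParentSketch {
    public static void main(String[] args) throws Exception {
        FileSystemManager manager = VFS.getManager();

        // Hypothetical target; the "reports" folder may not exist yet.
        FileObject file = manager.resolveFile("file:///tmp/vfs-example/reports/out.txt");

        // getOutputStream(false) routes through the logic shown above: since the
        // file is still IMAGINARY, its parent folder is created before writing.
        try (OutputStream os = file.getContent().getOutputStream(false)) {
            os.write("hello".getBytes(StandardCharsets.UTF_8));
        }

        file.close();
    }
}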
 
Example 13
Source File: ProviderRandomSetLengthTests.java    From commons-vfs with Apache License 2.0
/**
 * Sets up a scratch folder for the test to use.
 */
protected FileObject createScratchFolder() throws Exception {
    final FileObject scratchFolder = this.getWriteFolder();

    // Make sure the test folder is empty
    scratchFolder.delete(Selectors.EXCLUDE_SELF);
    scratchFolder.createFolder();

    return scratchFolder;
}
 
Example 14
Source File: ProviderWriteAppendTests.java    From commons-vfs with Apache License 2.0
/**
 * Sets up a scratch folder for the test to use.
 */
protected FileObject createScratchFolder() throws Exception {
    final FileObject scratchFolder = getWriteFolder();

    // Make sure the test folder is empty
    scratchFolder.delete(Selectors.EXCLUDE_SELF);
    scratchFolder.createFolder();

    return scratchFolder;
}
 
Example 15
Source File: VFSZipper.java    From scheduling with GNU Affero General Public License v3.0
public static void unzip(InputStream is, FileObject outfileObj) throws IOException {
    Closer closer = Closer.create();
    try {
        ZipInputStream zis = new ZipInputStream(is);
        closer.register(zis);
        ZipEntry zipEntry = zis.getNextEntry();
        while (zipEntry != null) {
            FileObject entryFile = outfileObj.resolveFile(zipEntry.getName());

            if (zipEntry.isDirectory()) {
                logger.debug("Creating folder " + entryFile.getURL());
                entryFile.createFolder();
            } else {
                if (!entryFile.exists()) {
                    logger.debug("Creating file " + entryFile.getURL());
                    entryFile.createFile();
                } else {
                    logger.debug("Overwriting file " + entryFile.getURL());
                }
                Zipper.ZIP.unzipEntry(zis, entryFile.getContent().getOutputStream());
            }

            zipEntry = zis.getNextEntry();
        }
    } catch (IOException ioe) {
        logger.error("Error when unzipping", ioe);
        throw closer.rethrow(ioe);
    } finally {
        closer.close();
    }
}
 
Example 16
Source File: DistributedCacheUtilImpl.java    From pentaho-hadoop-shims with Apache License 2.0
/**
 * Extract a zip archive to a directory.
 *
 * @param archive Zip archive to extract
 * @param dest    Destination directory. This must not exist!
 * @return Directory the zip was extracted into
 * @throws IllegalArgumentException when the archive file does not exist or the destination directory already exists
 * @throws IOException
 * @throws KettleFileException
 */
public FileObject extract( FileObject archive, FileObject dest ) throws IOException, KettleFileException {
  if ( !archive.exists() ) {
    throw new IllegalArgumentException( "archive does not exist: " + archive.getURL().getPath() );
  }

  if ( dest.exists() ) {
    throw new IllegalArgumentException( "destination already exists" );
  }
  dest.createFolder();

  try {
    byte[] buffer = new byte[ DEFAULT_BUFFER_SIZE ];
    int len = 0;
    ZipInputStream zis = new ZipInputStream( archive.getContent().getInputStream() );
    try {
      ZipEntry ze;
      while ( ( ze = zis.getNextEntry() ) != null ) {
        FileObject entry = KettleVFS.getFileObject( dest + Const.FILE_SEPARATOR + ze.getName() );
        FileObject parent = entry.getParent();
        if ( parent != null ) {
          parent.createFolder();
        }
        if ( ze.isDirectory() ) {
          entry.createFolder();
          continue;
        }

        OutputStream os = KettleVFS.getOutputStream( entry, false );
        try {
          while ( ( len = zis.read( buffer ) ) > 0 ) {
            os.write( buffer, 0, len );
          }
        } finally {
          if ( os != null ) {
            os.close();
          }
        }
      }
    } finally {
      if ( zis != null ) {
        zis.close();
      }
    }
  } catch ( Exception ex ) {
    // Try to clean up the temp directory and all files
    if ( !deleteDirectory( dest ) ) {
      throw new KettleFileException( "Could not clean up temp dir after error extracting", ex );
    }
    throw new KettleFileException( "error extracting archive", ex );
  }

  return dest;
}
 
Example 17
Source File: JobEntryPGPDecryptFiles.java    From pentaho-kettle with Apache License 2.0
private boolean CreateDestinationFolder( FileObject filefolder ) {
  FileObject folder = null;
  try {
    if ( destination_is_a_file ) {
      folder = filefolder.getParent();
    } else {
      folder = filefolder;
    }

    if ( !folder.exists() ) {
      if ( create_destination_folder ) {
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobPGPDecryptFiles.Log.FolderNotExist", folder
            .getName().toString() ) );
        }
        folder.createFolder();
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobPGPDecryptFiles.Log.FolderWasCreated", folder
            .getName().toString() ) );
        }
      } else {
        logError( BaseMessages.getString( PKG, "JobPGPDecryptFiles.Log.FolderNotExist", folder
          .getName().toString() ) );
        return false;
      }
    }
    return true;
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "JobPGPDecryptFiles.Log.CanNotCreateParentFolder", folder
      .getName().toString() ), e );

  } finally {
    if ( folder != null ) {
      try {
        folder.close();
      } catch ( Exception ex ) { /* Ignore */
      }
    }
  }
  return false;
}
 
Example 18
Source File: JobEntryJob.java    From pentaho-kettle with Apache License 2.0
private boolean createParentFolder( String filename ) {
  // Check for parent folder
  FileObject parentfolder = null;
  boolean resultat = true;
  try {
    // Get parent folder
    parentfolder = KettleVFS.getFileObject( filename, this ).getParent();
    if ( !parentfolder.exists() ) {
      if ( createParentFolder ) {
        if ( log.isDebug() ) {
          log.logDebug( BaseMessages.getString( PKG, "JobJob.Log.ParentLogFolderNotExist", parentfolder
            .getName().toString() ) );
        }
        parentfolder.createFolder();
        if ( log.isDebug() ) {
          log.logDebug( BaseMessages.getString( PKG, "JobJob.Log.ParentLogFolderCreated", parentfolder
            .getName().toString() ) );
        }
      } else {
        log.logError( BaseMessages.getString( PKG, "JobJob.Log.ParentLogFolderNotExist", parentfolder
          .getName().toString() ) );
        resultat = false;
      }
    } else {
      if ( log.isDebug() ) {
        log.logDebug( BaseMessages.getString( PKG, "JobJob.Log.ParentLogFolderExists", parentfolder
          .getName().toString() ) );
      }
    }
  } catch ( Exception e ) {
    resultat = false;
    log.logError( BaseMessages.getString( PKG, "JobJob.Error.ChekingParentLogFolderTitle" ), BaseMessages
      .getString( PKG, "JobJob.Error.ChekingParentLogFolder", parentfolder.getName().toString() ), e );
  } finally {
    if ( parentfolder != null ) {
      try {
        parentfolder.close();
        parentfolder = null;
      } catch ( Exception ex ) {
        // Ignore
      }
    }
  }

  return resultat;
}
 
Example 19
Source File: LucidDBBulkLoader.java    From pentaho-kettle with Apache License 2.0
public boolean execute( LucidDBBulkLoaderMeta meta, boolean wait ) throws KettleException {
  Runtime rt = Runtime.getRuntime();

  try {
    String tableName = environmentSubstitute( meta.getTableName() );

    // 1) Set up the FIFO folder, create the directory and path to it...
    //
    String fifoVfsDirectory = environmentSubstitute( meta.getFifoDirectory() );
    FileObject directory = KettleVFS.getFileObject( fifoVfsDirectory, getTransMeta() );
    directory.createFolder();
    String fifoDirectory = KettleVFS.getFilename( directory );

    // 2) Create the FIFO file using the "mkfifo" command...
    // Make sure to log all the possible output, also from STDERR
    //
    data.fifoFilename = KettleVFS.getFilename( directory ) + Const.FILE_SEPARATOR + tableName + ".csv";
    data.bcpFilename = KettleVFS.getFilename( directory ) + Const.FILE_SEPARATOR + tableName + ".bcp";

    File fifoFile = new File( data.fifoFilename );
    if ( !fifoFile.exists() ) {
      String mkFifoCmd = "mkfifo " + data.fifoFilename + "";
      logBasic( "Creating FIFO file using this command : " + mkFifoCmd );
      Process mkFifoProcess = rt.exec( mkFifoCmd );
      StreamLogger errorLogger = new StreamLogger( log, mkFifoProcess.getErrorStream(), "mkFifoError" );
      StreamLogger outputLogger = new StreamLogger( log, mkFifoProcess.getInputStream(), "mkFifoOutput" );
      new Thread( errorLogger ).start();
      new Thread( outputLogger ).start();
      int result = mkFifoProcess.waitFor();
      if ( result != 0 ) {
        throw new Exception( "Return code " + result + " received from statement : " + mkFifoCmd );
      }
    }

    // 3) Make a connection to LucidDB for sending SQL commands
    // (Also, we need a clear cache for getting up-to-date target metadata)
    DBCache.getInstance().clear( meta.getDatabaseMeta().getName() );
    if ( meta.getDatabaseMeta() == null ) {
      logError( BaseMessages.getString( PKG, "LuciDBBulkLoader.Init.ConnectionMissing", getStepname() ) );
      return false;
    }
    data.db = new Database( this, meta.getDatabaseMeta() );
    data.db.shareVariablesWith( this );
    // Connect to the database
    if ( getTransMeta().isUsingUniqueConnections() ) {
      synchronized ( getTrans() ) {
        data.db.connect( getTrans().getTransactionId(), getPartitionID() );
      }
    } else {
      data.db.connect( getPartitionID() );
    }

    logBasic( "Connected to LucidDB" );

    // 4) Now we are ready to create the LucidDB FIFO server that will handle the actual bulk loading.
    //
    String fifoServerStatement = "";
    fifoServerStatement += "create or replace server " + meta.getFifoServerName() + Const.CR;
    fifoServerStatement += "foreign data wrapper sys_file_wrapper" + Const.CR;
    fifoServerStatement += "options (" + Const.CR;
    fifoServerStatement += "directory '" + fifoDirectory + "'," + Const.CR;
    fifoServerStatement += "file_extension 'csv'," + Const.CR;
    fifoServerStatement += "with_header 'no'," + Const.CR;
    fifoServerStatement += "num_rows_scan '0'," + Const.CR;
    fifoServerStatement += "lenient 'no');" + Const.CR;

    logBasic( "Creating LucidDB fifo_server with the following command: " + fifoServerStatement );
    data.db.execStatements( fifoServerStatement );

    // 5) Set the error limit in the LucidDB session
    // REVIEW jvs 13-Dec-2008: is this guaranteed to retain the same
    // connection?
    String errorMaxStatement = "";
    errorMaxStatement += "alter session set \"errorMax\" = " + meta.getMaxErrors() + ";" + Const.CR;
    logBasic( "Setting error limit in LucidDB session with the following command: " + errorMaxStatement );
    data.db.execStatements( errorMaxStatement );

    // 6) Now we also need to create a bulk loader file .bcp
    //
    createBulkLoadConfigFile( data.bcpFilename );

    // 7) execute the actual load command!
    // This will actually block until the load is done in the
    // separate execution thread; see notes in executeLoadCommand
    // on why it's important for this to occur BEFORE
    // opening our end of the FIFO.
    //
    executeLoadCommand( tableName );

    // 8) We have to write rows to the FIFO file later on.
    data.fifoStream = new BufferedOutputStream( new FileOutputStream( fifoFile ) );
  } catch ( Exception ex ) {
    throw new KettleException( ex );
  }

  return true;
}
 
Example 20
Source File: ActionPGPDecryptFiles.java    From hop with Apache License 2.0 4 votes vote down vote up
private boolean CreateDestinationFolder( FileObject filefolder ) {
  FileObject folder = null;
  try {
    if ( destination_is_a_file ) {
      folder = filefolder.getParent();
    } else {
      folder = filefolder;
    }

    if ( !folder.exists() ) {
      if ( create_destination_folder ) {
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "ActionPGPDecryptFiles.Log.FolderNotExist", folder
            .getName().toString() ) );
        }
        folder.createFolder();
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "ActionPGPDecryptFiles.Log.FolderWasCreated", folder
            .getName().toString() ) );
        }
      } else {
        logError( BaseMessages.getString( PKG, "ActionPGPDecryptFiles.Log.FolderNotExist", folder
          .getName().toString() ) );
        return false;
      }
    }
    return true;
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "ActionPGPDecryptFiles.Log.CanNotCreateParentFolder", folder
      .getName().toString() ), e );

  } finally {
    if ( folder != null ) {
      try {
        folder.close();
      } catch ( Exception ex ) { /* Ignore */
      }
    }
  }
  return false;
}