Java Code Examples for org.pentaho.di.core.vfs.KettleVFS#getFileObject()

The following examples show how to use org.pentaho.di.core.vfs.KettleVFS#getFileObject(). You can vote up the examples you like or vote down the ones you don't, and follow the links above each example to the original project or source file. You may also check out the related API usage on the sidebar.
Example 1
Source File: CubeOutput.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Opens the cube output file and wires up the stream chain
 * (file -> gzip -> data output). Optionally registers the file in the
 * transformation's result files.
 *
 * @throws KettleFileException when the file cannot be opened for writing
 */
private void prepareFile() throws KettleFileException {
  try {
    String resolvedName = environmentSubstitute( meta.getFilename() );

    if ( meta.isAddToResultFiles() ) {
      // Register the cube file so downstream job entries can pick it up.
      ResultFile cubeResult = new ResultFile(
        ResultFile.FILE_TYPE_GENERAL,
        KettleVFS.getFileObject( resolvedName, getTransMeta() ),
        getTransMeta().getName(),
        getStepname() );
      cubeResult.setComment( "This file was created with a cube file output step" );
      addResultFile( cubeResult );
    }

    // Always overwrite (never append) the target file.
    data.fos = KettleVFS.getOutputStream( resolvedName, getTransMeta(), false );
    data.zip = new GZIPOutputStream( data.fos );
    data.dos = new DataOutputStream( data.zip );
  } catch ( Exception e ) {
    throw new KettleFileException( e );
  }
}
 
Example 2
Source File: VFSFileProvider.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Resolves a VFS file descriptor into a VFSFile/VFSDirectory wrapper.
 *
 * @param file descriptor holding the path and connection details
 * @return a VFSDirectory for folders, a VFSFile otherwise, or null when the
 *         path does not exist or cannot be resolved
 */
@Override public VFSFile getFile( VFSFile file ) {
  try {
    FileObject resolved = KettleVFS
      .getFileObject( file.getPath(), new Variables(), VFSHelper.getOpts( file.getPath(), file.getConnection() ) );
    if ( !resolved.exists() ) {
      return null;
    }
    // Derive the parent path; fall back to "<scheme>://" for root objects.
    String parentPath;
    if ( resolved.getParent() != null && resolved.getParent().getName() != null ) {
      parentPath = resolved.getParent().getName().getURI();
    } else {
      parentPath = resolved.getURL().getProtocol() + "://";
    }
    return resolved.getType().equals( FileType.FOLDER )
      ? VFSDirectory.create( parentPath, resolved, null, file.getDomain() )
      : VFSFile.create( parentPath, resolved, null, file.getDomain() );
  } catch ( KettleFileException | FileSystemException e ) {
    // Treat resolution failures the same as "file does not exist".
  }
  return null;
}
 
Example 3
Source File: JobEntryPGPEncryptFiles.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Registers the given file in the parent job's result files so subsequent
 * job entries can see it. Failures are logged but never abort the entry.
 *
 * @param fileaddentry path/URI of the file to register
 * @param result the job Result receiving the result file
 * @param parentJob the running parent job (supplies the origin job name)
 */
private void addFileToResultFilenames( String fileaddentry, Result result, Job parentJob ) {
  try {
    ResultFile resultFile =
      new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( fileaddentry ), parentJob
        .getJobname(), toString() );
    result.getResultFiles().put( resultFile.getFile().toString(), resultFile );

    if ( isDebug() ) {
      logDebug( " ------ " );
      logDebug( BaseMessages.getString( PKG, "JobPGPEncryptFiles.Log.FileAddedToResultFilesName", fileaddentry ) );
    }

  } catch ( Exception e ) {
    // Fixed: the original joined the file name and exception message with an
    // empty string ("" ), producing an unreadable run-together log message.
    logError( BaseMessages.getString( PKG, "JobPGPEncryptFiles.Error.AddingToFilenameResult" ), fileaddentry
      + " : " + e.getMessage() );
  }
}
 
Example 4
Source File: LoadFileInputMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Rewrites the configured relative file names to absolute ones so that an
 * exported (zipped) transformation can still locate its files.
 *
 * @param space the variable space used to resolve variables in the file names
 * @param definitions map of exported resource definitions
 * @param resourceNamingInterface naming strategy for exported resources
 * @param repository the repository to optionally load other resources from
 * @param metaStore the metaStore in which non-kettle metadata could reside
 * @return always null here; only the internal file names are rewritten
 * @throws KettleException when a file name cannot be resolved
 */
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
    ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // We operate on a copy of the original meta, so rewriting in place is safe.
    // Skip entirely when the file names come from an input field.
    if ( !fileinfield ) {
      for ( int idx = 0; idx < fileName.length; idx++ ) {
        FileObject resolved = KettleVFS.getFileObject( space.environmentSubstitute( fileName[idx] ), space );
        // Only rename the file itself when no mask (wildcard) is configured.
        fileName[idx] = resourceNamingInterface.nameResource( resolved, space, Utils.isEmpty( fileMask[idx] ) );
      }
    }
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example 5
Source File: SQLFileOutputMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Converts the configured output file name into an absolute path so that an
 * exported transformation (which runs from a ZIP and cannot use relative
 * references) can still find it — assuming the file is reachable, e.g. on a
 * shared drive.
 *
 * @param space the variable space used to resolve variables in the file name
 * @param definitions map of exported resource definitions
 * @param resourceNamingInterface naming strategy for exported resources
 * @param repository the repository to optionally load other resources from
 * @param metaStore the metaStore in which non-kettle metadata could reside
 * @return the absolute file name, or null when the file does not exist
 * @throws KettleException when the file name cannot be resolved
 */
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // We operate on a copy of the original meta, so mutating fileName is safe.
    // e.g. ${Internal.Transformation.Filename.Directory}/../foo/bar.data
    //  ->  /home/matt/test/files/foo/bar.data
    FileObject resolved = KettleVFS.getFileObject( space.environmentSubstitute( fileName ), space );

    // A missing file is simply not exported.
    if ( !resolved.exists() ) {
      return null;
    }

    fileName = resourceNamingInterface.nameResource( resolved, space, true );
    return fileName;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example 6
Source File: JobEntryFTPSGetIT.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Test
public void downloadFile_WhenDestinationIsSetViaVariable() throws Exception {
  // The target directory is supplied indirectly through a job variable.
  final String variableName = "my-var";
  final String expectedPath = ramDir + "/" + FtpsServer.SAMPLE_FILE;

  JobEntryFTPSGet job = createCommonJob();
  job.setVariable( variableName, ramDir );
  job.setTargetDirectory( String.format( "${%s}", variableName ) );

  FileObject downloaded = KettleVFS.getFileObject( expectedPath );
  assertFalse( downloaded.exists() );
  try {
    job.execute( new Result(), 1 );
    // Re-resolve after execution to observe the freshly written file.
    downloaded = KettleVFS.getFileObject( expectedPath );
    assertTrue( downloaded.exists() );
  } finally {
    downloaded.delete();
  }
}
 
Example 7
Source File: GetFileNamesMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Rewrites the statically configured file/folder names to absolute paths so
 * an exported (zipped) transformation can still resolve them. Names coming
 * from previous steps are left untouched.
 *
 * @param space the variable space used to resolve variables in the names
 * @param definitions map of exported resource definitions
 * @param resourceNamingInterface naming strategy for exported resources
 * @param repository the repository to optionally load other resources from
 * @param metaStore the metaStore in which non-kettle metadata could reside
 * @return always null here; only the internal names are rewritten
 * @throws KettleException when a name cannot be resolved
 */
@Override
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // Nothing to do when file names arrive from a field of previous steps.
    if ( !filefield ) {
      // Replace each name (folder or file) with its exported form.
      for ( int idx = 0; idx < fileName.length; idx++ ) {
        FileObject resolved = KettleVFS.getFileObject( space.environmentSubstitute( fileName[idx] ), space );
        // Only rename the file itself when no mask (wildcard) is configured.
        fileName[idx] = resourceNamingInterface.nameResource( resolved, space, Utils.isEmpty( fileMask[idx] ) );
      }
    }
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example 8
Source File: DistributedCacheUtilImplOSDependentTest.java    From pentaho-hadoop-shims with Apache License 2.0 6 votes vote down vote up
// Verifies that installKettleEnvironment() also stages an additional plugin
// directory alongside the big-data plugin in the target file system.
@Test
public void installKettleEnvironment_additional_plugins() throws Exception {
  DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl();

  Configuration conf = new Configuration();
  FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem( conf );

  // This "empty pmr" contains a lib/ folder but with no content
  FileObject pmrArchive = KettleVFS.getFileObject( getClass().getResource( "/empty-pmr.zip" ).toURI().getPath() );
  FileObject bigDataPluginDir = DistributedCacheTestUtil
    .createTestFolderWithContent( DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME );

  String pluginName = "additional-plugin";
  FileObject additionalPluginDir = DistributedCacheTestUtil.createTestFolderWithContent( pluginName );
  Path root = new Path( "bin/test/installKettleEnvironment" );
  try {
    // Install the environment plus the extra plugin, then check both landed.
    ch.installKettleEnvironment( pmrArchive, fs, root, bigDataPluginDir, "bin/test/" + pluginName, "" );
    assertTrue( ch.isKettleEnvironmentInstalledAt( fs, root ) );
    assertTrue( fs.exists( new Path( root, "plugins/bin/test/" + pluginName ) ) );
  } finally {
    // Clean up the local test folders and the installed root in the FS.
    bigDataPluginDir.delete( new AllFileSelector() );
    additionalPluginDir.delete( new AllFileSelector() );
    fs.delete( root, true );
  }
}
 
Example 9
Source File: XsdValidatorIntTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Copies a test resource into the RAM VFS directory and returns the
 * resulting file object.
 *
 * @param filename resource name, also used as the target name under RAMDIR
 * @return the RAM file object holding a copy of the resource content
 * @throws Exception when the resource cannot be read or written
 */
private FileObject loadRamFile( String filename ) throws Exception {
  String destination = RAMDIR + "/" + filename;
  try ( InputStream source = getFileInputStream( filename ) ) {
    FileObject ramFile = KettleVFS.getFileObject( destination );
    try ( OutputStream sink = ramFile.getContent().getOutputStream() ) {
      IOUtils.copy( source, sink );
    }
    return ramFile;
  }
}
 
Example 10
Source File: ConnectionFileProviderTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void testGetChildren() throws Exception {
  ConnectionFileObject dir = (ConnectionFileObject) KettleVFS.getFileObject( PVFS_DIRECTORY_PATH );
  // The pvfs:// path must resolve to the underlying provider URI.
  Assert.assertEquals( RESOLVED_DIRECTORY_PATH, dir.getResolvedFileObject().getPublicURIString() );
  // Every child must still be reported under the pvfs:// scheme.
  for ( FileObject child : dir.getChildren() ) {
    Assert.assertTrue( child.getPublicURIString().startsWith( PVFS_PREFIX ) );
  }
}
 
Example 11
Source File: KettleFileRepository.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Checks whether a repository element exists as a file on disk.
 *
 * @param name element name
 * @param repositoryDirectory directory the element lives in
 * @param objectType element type, which determines the file extension
 * @return true when the backing file exists
 * @throws KettleException when the existence check fails
 */
@Override
public boolean exists( final String name, final RepositoryDirectoryInterface repositoryDirectory,
  final RepositoryObjectType objectType ) throws KettleException {
  try {
    String path = calcFilename( repositoryDirectory, name, objectType.getExtension() );
    return KettleVFS.getFileObject( path ).exists();
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example 12
Source File: KettleFileRepository.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Deletes a root-level repository object file (e.g. a shared database).
 *
 * @param name object name
 * @param extension file extension of the object
 * @throws KettleException when the file cannot be deleted
 */
public void deleteRootObject( String name, String extension ) throws KettleException {
  try {
    String target = calcDirectoryName( null ) + name + extension;
    KettleVFS.getFileObject( target ).delete();
  } catch ( Exception e ) {
    throw new KettleException( "Unable to delete database with name ["
      + name + "] and extension [" + extension + "]", e );
  }
}
 
Example 13
Source File: XBaseInputMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Turns the configured DBF file name into an absolute path so that an
 * exported transformation (which runs from a ZIP and cannot use relative
 * references) can still locate it — assuming the file is reachable, e.g. on
 * a shared drive. Names coming from previous steps are left untouched.
 *
 * @param space the variable space used to resolve variables in the file name
 * @param definitions map of exported resource definitions
 * @param resourceNamingInterface naming strategy for exported resources
 * @param repository the repository to optionally load other resources from
 * @param metaStore the metaStore in which non-kettle metadata could reside
 * @return the absolute file name, or null when skipped or the file is absent
 * @throws KettleException when the file name cannot be resolved
 */
@Override
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // Skip when file names arrive from previous steps; only static config is rewritten.
    if ( acceptingFilenames ) {
      return null;
    }

    // e.g. ${Internal.Transformation.Filename.Directory}/../foo/bar.dbf
    //  ->  /home/matt/test/files/foo/bar.dbf
    // (We operate on a copy of the original meta, so mutation is safe.)
    FileObject resolved = KettleVFS.getFileObject( space.environmentSubstitute( dbfFileName ), space );

    if ( resolved.exists() ) {
      // Convert to an absolute path.
      dbfFileName = resourceNamingInterface.nameResource( resolved, space, true );
      return dbfFileName;
    }

    // A missing file is simply not exported.
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example 14
Source File: JobEntryFTPSGetIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void downloadFile_WhenDestinationIsSetDirectly() throws Exception {
  JobEntryFTPSGet job = createCommonJob();
  job.setTargetDirectory( ramDir );

  final String targetPath = ramDir + "/" + FtpsServer.SAMPLE_FILE;
  FileObject downloaded = KettleVFS.getFileObject( targetPath );
  assertFalse( downloaded.exists() );
  try {
    job.execute( new Result(), 1 );
    // Re-resolve after execution to observe the freshly written file.
    downloaded = KettleVFS.getFileObject( targetPath );
    assertTrue( downloaded.exists() );
  } finally {
    downloaded.delete();
  }
}
 
Example 15
Source File: PurRepositoryIT.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
// End-to-end export test: saves a database, a transformation and a job into
// the repository, exports everything to an XML file, then parses the export
// and checks the node counts and that no errors were logged.
@Test
public void testExport() throws Exception {
  final String exportFileName = new File( "test.export" ).getAbsolutePath(); //$NON-NLS-1$

  RepositoryDirectoryInterface rootDir = initRepo();
  String uniqueTransName = EXP_TRANS_NAME.concat( EXP_DBMETA_NAME );
  TransMeta transMeta = createTransMeta( EXP_DBMETA_NAME );

  // Create a database association
  DatabaseMeta dbMeta = createDatabaseMeta( EXP_DBMETA_NAME );
  repository.save( dbMeta, VERSION_COMMENT_V1, null );

  TableInputMeta tableInputMeta = new TableInputMeta();
  tableInputMeta.setDatabaseMeta( dbMeta );

  transMeta.addStep( new StepMeta( EXP_TRANS_STEP_1_NAME, tableInputMeta ) );

  // Save the transformation and verify it got an id, a revision and exists.
  RepositoryDirectoryInterface transDir = rootDir.findDirectory( DIR_TRANSFORMATIONS );
  repository.save( transMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( transMeta ); // So this transformation is cleaned up afterward
  assertNotNull( transMeta.getObjectId() );
  ObjectRevision version = transMeta.getObjectRevision();
  assertNotNull( version );
  assertTrue( hasVersionWithComment( transMeta, VERSION_COMMENT_V1 ) );
  assertTrue( repository.exists( uniqueTransName, transDir, RepositoryObjectType.TRANSFORMATION ) );

  // Same for a job.
  JobMeta jobMeta = createJobMeta( EXP_JOB_NAME );
  RepositoryDirectoryInterface jobsDir = rootDir.findDirectory( DIR_JOBS );
  repository.save( jobMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( jobMeta );
  assertNotNull( jobMeta.getObjectId() );
  version = jobMeta.getObjectRevision();
  assertNotNull( version );
  assertTrue( hasVersionWithComment( jobMeta, VERSION_COMMENT_V1 ) );
  assertTrue( repository.exists( EXP_JOB_NAME, jobsDir, RepositoryObjectType.JOB ) );

  // Capture ERROR-level log events emitted during the export.
  LogListener errorLogListener = new LogListener( LogLevel.ERROR );
  KettleLogStore.getAppender().addLoggingEventListener( errorLogListener );

  try {
    repository.getExporter().exportAllObjects( new MockProgressMonitorListener(), exportFileName, null, "all" ); //$NON-NLS-1$
    FileObject exportFile = KettleVFS.getFileObject( exportFileName );
    // The exporter must have closed the file content after writing.
    assertFalse( "file left open", exportFile.getContent().isOpen() );
    assertNotNull( exportFile );
    // Parse the export XML and verify its structure.
    MockRepositoryExportParser parser = new MockRepositoryExportParser();
    SAXParserFactory.newInstance().newSAXParser().parse( KettleVFS.getInputStream( exportFile ), parser );
    if ( parser.getFatalError() != null ) {
      throw parser.getFatalError();
    }
    assertNotNull( "No nodes found in export", parser.getNodeNames() ); //$NON-NLS-1$
    assertTrue( "No nodes found in export", !parser.getNodeNames().isEmpty() ); //$NON-NLS-1$
    assertEquals( "Incorrect number of nodes", 5, parser.getNodeNames().size() ); //$NON-NLS-1$
    assertEquals( "Incorrect number of transformations", 1, parser.getNodesWithName( "transformation" ).size() ); //$NON-NLS-1$ //$NON-NLS-2$
    assertEquals( "Incorrect number of jobs", 1, parser.getNodesWithName( "job" ).size() ); //$NON-NLS-1$ //$NON-NLS-2$
    assertTrue( "log error", errorLogListener.getEvents().isEmpty() );

  } finally {
    // Remove the export file and detach the log listener in all cases.
    KettleVFS.getFileObject( exportFileName ).delete();
    KettleLogStore.getAppender().removeLoggingEventListener( errorLogListener );
  }
}
 
Example 16
Source File: JsonOutput.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Opens the output target for JSON writing: either the transformation's
 * servlet writer (servlet output mode) or a file stream resolved through
 * KettleVFS, wrapped in a buffered, optionally encoding-aware writer.
 *
 * @return true when a writer is ready for use, false when opening failed
 */
public boolean openNewFile() {
  // Already open: nothing to do.
  if ( data.writer != null ) {
    return true;
  }
  boolean retval = false;

  try {

    if ( meta.isServletOutput() ) {
      data.writer = getTrans().getServletPrintWriter();
    } else {
      String filename = buildFilename();
      createParentFolder( filename );
      if ( meta.AddToResult() ) {
        // Register the output file in the transformation's result files.
        ResultFile resultFile =
          new ResultFile(
            ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( filename, getTransMeta() ),
            getTransMeta().getName(), getStepname() );
        resultFile.setComment( BaseMessages.getString( PKG, "JsonOutput.ResultFilenames.Comment" ) );
        addResultFile( resultFile );
      }

      // Cleanup: the original declared two aliases for the same stream
      // (outputStream/fos); a single declaration is sufficient.
      OutputStream outputStream = KettleVFS.getOutputStream( filename, getTransMeta(), meta.isFileAppended() );

      if ( !Utils.isEmpty( meta.getEncoding() ) ) {
        data.writer =
          new OutputStreamWriter( new BufferedOutputStream( outputStream, 5000 ), environmentSubstitute( meta
            .getEncoding() ) );
      } else {
        data.writer = new OutputStreamWriter( new BufferedOutputStream( outputStream, 5000 ) );
      }

      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JsonOutput.FileOpened", filename ) );
      }

      data.splitnr++;
    }

    retval = true;

  } catch ( Exception e ) {
    // Report and return false; the caller decides how to handle the failure.
    logError( BaseMessages.getString( PKG, "JsonOutput.Error.OpeningFile", e.toString() ) );
  }

  return retval;
}
 
Example 17
Source File: JsonInputTest.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
// Writes two JSON files into the RAM VFS, feeds them to the JsonInput step as
// an explicit file list, and verifies the extracted prices, row numbers and
// short file names row by row — with no errors logged.
@Test
public void testFileList() throws Exception {
  ByteArrayOutputStream err = new ByteArrayOutputStream();
  helper.redirectLog( err, LogLevel.ERROR );

  final String input1 = getBasicTestJson();
  final String input2 = "{ \"store\": { \"book\": [ { \"price\": 9.99 } ] } }";
  try ( FileObject fileObj1 = KettleVFS.getFileObject( BASE_RAM_DIR + "test1.json" );
        FileObject fileObj2 = KettleVFS.getFileObject( BASE_RAM_DIR + "test2.json" ) ) {
    // Materialize both inputs in the RAM file system.
    try ( OutputStream out = fileObj1.getContent().getOutputStream() ) {
      out.write( input1.getBytes() );
    }
    try ( OutputStream out = fileObj2.getContent().getOutputStream() ) {
      out.write( input2.getBytes() );
    }
    // Extract every book price via a JSONPath expression.
    JsonInputField price = new JsonInputField();
    price.setName( "price" );
    price.setType( ValueMetaInterface.TYPE_NUMBER );
    price.setPath( "$..book[*].price" );
    List<FileObject> fileList = Arrays.asList( fileObj1, fileObj2 );
    JsonInputMeta meta = createFileListMeta( fileList );
    meta.setInputFields( new JsonInputField[] { price } );

    // Also emit a running row number and the short file name per row.
    meta.setIncludeRowNumber( true );
    meta.setRowNumberField( "rownbr" );

    meta.setShortFileNameField( "fname" );

    JsonInput jsonInput = createJsonInput( meta );
    // Expected rows: four prices from test1.json, then one from test2.json.
    RowComparatorListener rowComparator = new RowComparatorListener(
      new Object[] { 8.95d, 1L, "test1.json" },
      new Object[] { 12.99d, 2L, "test1.json" },
      new Object[] { 8.99d, 3L, "test1.json" },
      new Object[] { 22.99d, 4L, "test1.json" },
      new Object[] { 9.99d, 5L, "test2.json" } );
    jsonInput.addRowListener( rowComparator );

    processRows( jsonInput, 5 );
    disposeJsonInput( jsonInput );
    assertEquals( err.toString(), 0, jsonInput.getErrors() );
  } finally {
    deleteFiles();
  }
}
 
Example 18
Source File: Trans.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Sets the internal Kettle variables (transformation filename/directory, name,
 * repository directory, and entry current directory) on this transformation's
 * variable space.
 *
 * @param var the variable space used to resolve the transformation's filename
 *            through KettleVFS
 */
public void setInternalKettleVariables( VariableSpace var ) {
  boolean hasFilename = transMeta != null && !Utils.isEmpty( transMeta.getFilename() );
  if ( hasFilename ) { // we have a filename that's defined.
    try {
      FileObject fileObject = KettleVFS.getFileObject( transMeta.getFilename(), var );
      FileName fileName = fileObject.getName();

      // The filename of the transformation
      variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, fileName.getBaseName() );

      // The directory of the transformation
      FileName fileDir = fileName.getParent();
      variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, fileDir.getURI() );
    } catch ( KettleFileException e ) {
      // On resolution failure, fall back to empty values rather than failing.
      variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, "" );
      variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, "" );
    }
  } else {
    variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, "" );
    variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, "" );
  }

  // NOTE(review): transMeta is null-checked above for hasFilename, but the
  // lines below dereference it unconditionally — a null transMeta would NPE
  // here. Confirm whether callers guarantee transMeta != null.
  boolean hasRepoDir = transMeta.getRepositoryDirectory() != null && transMeta.getRepository() != null;

  // The name of the transformation
  variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_NAME, Const.NVL( transMeta.getName(), "" ) );

  // setup fallbacks: mirror repository directory <-> filename directory
  if ( hasRepoDir ) {
    variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, variables.getVariable(
      Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY ) );
  } else {
    variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY, variables.getVariable(
      Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY ) );
  }

  // TODO PUT THIS INSIDE OF THE "IF"
  // The name of the directory in the repository
  variables.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY, transMeta
    .getRepositoryDirectory() != null ? transMeta.getRepositoryDirectory().getPath() : "" );

  // Here we don't clear the definition of the job specific parameters, as they may come in handy.
  // A transformation can be called from a job and may inherit the job internal variables
  // but the other around is not possible.

  if ( hasRepoDir ) {
    variables.setVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, variables.getVariable(
      Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY ) );
    // A bare "/" repository root is normalized to the empty string.
    if ( "/".equals( variables.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY ) ) ) {
      variables.setVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, "" );
    }
  }

  setInternalEntryCurrentDirectory( hasFilename, hasRepoDir );

}
 
Example 19
Source File: JobEntryExportRepository.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Exports one first-level repository folder to an XML file, honoring the
 * configured "if file exists" policy (skip / unique name / fail).
 *
 * @param parentJob the running parent job
 * @param result the job result (used when registering the exported file)
 * @param log channel for detailed logging
 * @param repdir the repository directory to export
 * @param realoutfilename resolved base output folder
 * @param folderno 1-based index of this folder (for progress logging)
 * @param totalfolders total number of folders being exported
 * @return true on success or skip, false on failure or the "Fail" policy
 */
private boolean processOneFolder( Job parentJob, Result result, LogChannelInterface log,
  RepositoryDirectoryInterface repdir, String realoutfilename, int folderno, int totalfolders ) {
  boolean retval = false;
  try {
    if ( !repdir.isRoot() ) {
      // Only first-level folders (a single leading "/") are processed here.
      if ( repdir.toString().lastIndexOf( "/" ) == 0 ) {
        String filename = repdir.toString().replace( "/", "" );
        String foldername = realoutfilename;
        if ( newfolder ) {
          // Mirror the repository folder structure under the output folder.
          foldername = realoutfilename + Const.FILE_SEPARATOR + filename;
          this.file = KettleVFS.getFileObject( foldername, this );
          if ( !this.file.exists() ) {
            this.file.createFolder();
          }
        }

        filename = foldername + Const.FILE_SEPARATOR + buildFilename( filename ) + ".xml";
        this.file = KettleVFS.getFileObject( filename, this );

        // Apply the configured "if file exists" policy.
        if ( this.file.exists() ) {
          if ( iffileexists.equals( If_FileExists_Skip ) ) {
            // Skip this folder
            return true;
          } else if ( iffileexists.equals( If_FileExists_Uniquename ) ) {
            filename = realoutfilename + Const.FILE_SEPARATOR + buildUniqueFilename( filename ) + ".xml";
          } else if ( iffileexists.equals( If_FileExists_Fail ) ) {
            // Fail
            return false;
          }
        }

        if ( log.isDetailed() ) {
          logDetailed( "---" );
          logDetailed( BaseMessages.getString(
            PKG, "JobExportRepository.Log.FolderProcessing", "" + folderno, "" + totalfolders ) );
          logDetailed( BaseMessages.getString(
            PKG, "JobExportRepository.Log.OutFilename", repdir.toString(), filename ) );
        }

        // Export everything in this repository folder into the XML file.
        new RepositoryExporter( this.repository ).exportAllObjects( null, filename, repdir, "all" );
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobExportRepository.Log.OutFilenameEnd", repdir.toString(), filename ) );
        }

        if ( add_result_filesname ) {
          addFileToResultFilenames( filename, log, result, parentJob );
        }

      }
    } // end if root
    retval = true;
  } catch ( Exception e ) {
    // Update errors
    updateErrors();
    logError( BaseMessages.getString( PKG, "JobExportRepository.ErrorExportingFolder", repdir.toString(), e
      .toString() ) );
  }
  return retval;
}
 
Example 20
Source File: IngresVectorwiseLoader.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Create the command line for a sql/vwload process depending on the meta
 * information supplied: executable (explicit sql path, vwload, or sql from
 * the system path) followed by connection and loader arguments.
 *
 * @param meta
 *          The meta data to create the command line from
 *
 * @return The string to execute.
 *
 * @throws KettleException
 *           Upon any exception, or when no database connection is specified
 */
public String createCommandLine( IngresVectorwiseLoaderMeta meta ) throws KettleException {
  StringBuilder sb = new StringBuilder( 300 );

  // Pick the executable: explicit sql path if configured, else vwload/sql
  // from the system path.
  if ( !Utils.isEmpty( meta.getSqlPath() ) ) {
    try {
      FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getSqlPath() ), getTransMeta() );
      String sqlexec = Const.optionallyQuoteStringByOS( KettleVFS.getFilename( fileObject ) );
      sb.append( sqlexec );
      // sql @tc-dwh-test.timocom.net,tcp_ip,VW[ingres,pwd]::dwh
    } catch ( KettleFileException ex ) {
      throw new KettleException( "Error retrieving command string", ex );
    }
  } else {
    if ( meta.isUsingVwload() ) {
      if ( isDetailed() ) {
        logDetailed( "vwload defaults to system path" );
      }
      sb.append( "vwload" );
    } else {
      if ( isDetailed() ) {
        logDetailed( "sql defaults to system path" );
      }
      sb.append( "sql" );
    }
  }

  DatabaseMeta dm = meta.getDatabaseMeta();
  if ( dm != null ) {
    String databaseName = environmentSubstitute( Const.NVL( dm.getDatabaseName(), "" ) );
    String password =
      Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( Const.NVL( dm.getDatabaseInterface()
        .getPassword(), "" ) ) );
    // NOTE(review): this removes EVERY '7' character from the port string,
    // not just a suffix/prefix (e.g. "27017" -> "201"). Presumably it strips
    // the JDBC logical-port marker mentioned below — verify intent.
    String port = environmentSubstitute( Const.NVL( dm.getDatabasePortNumberString(), "" ) ).replace( "7", "" );
    String username = environmentSubstitute( Const.NVL( dm.getDatabaseInterface().getUsername(), "" ) );
    String hostname = environmentSubstitute( Const.NVL( dm.getDatabaseInterface().getHostname(), "" ) );
    String schemaTable = dm.getQuotedSchemaTableCombination( null, environmentSubstitute( meta.getTableName() ) );
    String encoding = environmentSubstitute( Const.NVL( meta.getEncoding(), "" ) );
    String fifoFile =
      Const.optionallyQuoteStringByOS( environmentSubstitute( Const.NVL( meta.getFifoFileName(), "" ) ) );
    String errorFile =
      Const.optionallyQuoteStringByOS( environmentSubstitute( Const.NVL( meta.getErrorFileName(), "" ) ) );
    int maxNrErrors = Const.toInt( environmentSubstitute( Const.NVL( meta.getMaxNrErrors(), "0" ) ), 0 );

    if ( meta.isUsingVwload() ) {
      // vwload argument style.
      sb.append( " -u " ).append( username );
      sb.append( " -P " ).append( password );
      // NOTE(review): the trailing .append( "" ) is a no-op — confirm whether
      // a quote or space was intended around the delimiter.
      sb.append( " -f " ).append( meta.getDelimiter() ).append( "" );
      sb.append( " -t " ).append( schemaTable );

      if ( !Utils.isEmpty( encoding ) ) {
        sb.append( " -C " ).append( encoding );
      }
      if ( !Utils.isEmpty( errorFile ) ) {
        sb.append( " -l " ).append( errorFile );
      }
      if ( maxNrErrors > 0 ) {
        // need multiplication for two because every wrong rows
        // provide 2 errors that is not evident
        sb.append( " -x " ).append( maxNrErrors * 2 );
      }
      sb.append( " " ).append( databaseName );
      sb.append( " " ).append( fifoFile );

    } else if ( meta.isUseDynamicVNode() ) {
      // logical portname in JDBC use a 7

      sb.append( " @" ).append( hostname ).append( "," ).append( port ).append( "[" ).append( username ).append( "," )
        .append( password ).append( "]::" ).append( databaseName );
    } else {
      // Database Name
      //
      sb.append( " " ).append( databaseName );
      if ( meta.isUseAuthentication() ) {
        // NOTE(review): no space before "-P" — this yields "<db>-P<pwd>".
        // Confirm whether a leading space is missing here.
        sb.append( "-P" ).append( password );
      }
    }
  } else {
    throw new KettleException( "No connection specified" );
  }

  return sb.toString();
}