Java Code Examples for org.pentaho.di.core.Const#FILE_SEPARATOR

The following examples show how to use org.pentaho.di.core.Const#FILE_SEPARATOR. Each example lists the source file and open-source project it was taken from, along with that project's license.
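Const.FILE_SEPARATOR is the platform-specific file separator (the same value as java.io.File.separator), so it is mostly used to assemble file system paths that work on both Windows and Unix-like systems. The minimal sketch below illustrates that pattern; the directory and file names are placeholders and are not taken from any of the examples that follow.

import org.pentaho.di.core.Const;

public class FileSeparatorSketch {
  public static void main( String[] args ) {
    // Hypothetical plugin layout, used only to illustrate the pattern.
    String directory = "plugins" + Const.FILE_SEPARATOR + "steps";
    String filename = directory + Const.FILE_SEPARATOR + "plugin.xml";

    // Prints "plugins/steps/plugin.xml" on Unix-like systems and
    // "plugins\steps\plugin.xml" on Windows.
    System.out.println( filename );
  }
}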
Example 1
Source File: BasePluginType.java    From pentaho-kettle with Apache License 2.0
/**
 * Create a new URL class loader with the jar file specified. Also include all the jar files in the lib folder next to
 * that file.
 *
 * @param jarFileUrl
 *          The jar file to include
 * @param classLoader
 *          the parent class loader to use
 * @return The URL class loader
 */
protected URLClassLoader createUrlClassLoader( URL jarFileUrl, ClassLoader classLoader ) {
  List<URL> urls = new ArrayList<>();

  // Also append all the files in the underlying lib folder if it exists...
  //
  try {
    String libFolderName = new File( URLDecoder.decode( jarFileUrl.getFile(), "UTF-8" ) ).getParent()
      + Const.FILE_SEPARATOR + "lib";
    if ( new File( libFolderName ).exists() ) {
      PluginFolder pluginFolder = new PluginFolder( libFolderName, false, true, searchLibDir );
      FileObject[] libFiles = pluginFolder.findJarFiles( true );
      for ( FileObject libFile : libFiles ) {
        urls.add( libFile.getURL() );
      }
    }
  } catch ( Exception e ) {
    LogChannel.GENERAL.logError( "Unexpected error searching for jar files in lib/ folder next to '"
      + jarFileUrl + "'", e );
  }

  urls.add( jarFileUrl );

  return new KettleURLClassLoader( urls.toArray( new URL[urls.size()] ), classLoader );
}
 
Example 2
Source File: PentahoMapReduceJobBuilderImplTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void testSubmitEmptyInstallId() throws IOException {
  Configuration conf = mock( Configuration.class );
  FileSystem fileSystem = mock( FileSystem.class );
  when( hadoopShim.getFileSystem( conf ) ).thenReturn( fileSystem );
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_USE_DISTRIBUTED_CACHE ) )
    .thenReturn( "true" );
  String installPath = "/path" + Const.FILE_SEPARATOR;
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_KETTLE_HDFS_INSTALL_DIR ) )
    .thenReturn( installPath );
  try {
    pentahoMapReduceJobBuilder.submit( conf, "" );
  } catch ( IOException e ) {
    // Ignore
  }
  verify( fileSystem ).asPath( installPath, pentahoMapReduceJobBuilder.getInstallId() );
}
 
Example 3
Source File: ConstUI.java    From pentaho-kettle with Apache License 2.0
/**
 * Return the tree path separated by Const.FILE_SEPARATOR, starting from a certain depth in the tree.
 *
 * @param ti
 *          The TreeItem to get the path for
 * @param from
 *          The depth to start at, use 0 to get the complete tree.
 * @return The tree path.
 */
public static final String getTreePath( TreeItem ti, int from ) {
  String[] path = getTreeStrings( ti );

  if ( path == null ) {
    return null;
  }

  String retval = "";

  for ( int i = from; i < path.length; i++ ) {
    if ( !path[i].equalsIgnoreCase( Const.FILE_SEPARATOR ) ) {
      retval += Const.FILE_SEPARATOR + path[i];
    }
  }

  return retval;
}
 
Example 4
Source File: PentahoMapReduceJobBuilderImplTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void testSubmitAlreadyInstalled() throws Exception {
  Configuration conf = mock( Configuration.class );
  JobConf jobConf = mock( JobConf.class );
  when( jobConf.getCredentials() ).thenReturn( new Credentials() );

  when( conf.getAsDelegateConf( any() ) ).thenReturn( jobConf );
  FileSystem fileSystem = mock( FileSystem.class );
  DistributedCacheUtil distributedCacheUtil = mock( DistributedCacheUtil.class );
  Path kettleEnvInstallDir = mock( Path.class );
  URI kettleEnvInstallDirUri = new URI( "http://testUri/path" );
  when( kettleEnvInstallDir.toUri() ).thenReturn( kettleEnvInstallDirUri );

  when( hadoopShim.getFileSystem( conf ) ).thenReturn( fileSystem );
  when( hadoopShim.getDistributedCacheUtil() ).thenReturn( distributedCacheUtil );
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_USE_DISTRIBUTED_CACHE ) )
    .thenReturn( "true" );
  String installPath = "/path" + Const.FILE_SEPARATOR;
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_KETTLE_HDFS_INSTALL_DIR ) )
    .thenReturn( installPath );
  String installId = "install_id";
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_KETTLE_INSTALLATION_ID ) )
    .thenReturn( installId );
  when( fileSystem.asPath( installPath, installId ) ).thenReturn( kettleEnvInstallDir );
  when( distributedCacheUtil.isKettleEnvironmentInstalledAt( fileSystem, kettleEnvInstallDir ) ).thenReturn( true );
  String mapreduceClasspath = "mapreduceClasspath";
  when( conf.get( PentahoMapReduceJobBuilderImpl.MAPREDUCE_APPLICATION_CLASSPATH,
    PentahoMapReduceJobBuilderImpl.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH ) ).thenReturn( mapreduceClasspath );

  pentahoMapReduceJobBuilder.submit( conf, "" );
  verify( logChannelInterface ).logBasic( BaseMessages.getString( PentahoMapReduceJobBuilderImpl.PKG,
    PentahoMapReduceJobBuilderImpl.JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_CONFIGURING_JOB_WITH_KETTLE_AT,
    kettleEnvInstallDirUri.getPath() ) );
  verify( conf ).set( PentahoMapReduceJobBuilderImpl.MAPREDUCE_APPLICATION_CLASSPATH,
    PentahoMapReduceJobBuilderImpl.CLASSES + mapreduceClasspath );
  verify( distributedCacheUtil ).configureWithKettleEnvironment( conf, fileSystem, kettleEnvInstallDir );
}
 
Example 5
Source File: PentahoMapReduceJobBuilderImplTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test( expected = IOException.class )
public void testSubmitNoPmrArchive() throws IOException, ConfigurationException, URISyntaxException {
  Configuration conf = mock( Configuration.class );
  FileSystem fileSystem = mock( FileSystem.class );
  DistributedCacheUtil distributedCacheUtil = mock( DistributedCacheUtil.class );
  Path kettleEnvInstallDir = mock( Path.class );
  URI kettleEnvInstallDirUri = new URI( "http://testUri/path" );
  when( kettleEnvInstallDir.toUri() ).thenReturn( kettleEnvInstallDirUri );

  when( hadoopShim.getFileSystem( conf ) ).thenReturn( fileSystem );
  when( hadoopShim.getDistributedCacheUtil() ).thenReturn( distributedCacheUtil );
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_USE_DISTRIBUTED_CACHE ) )
    .thenReturn( "true" );
  String installPath = "/path" + Const.FILE_SEPARATOR;
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_KETTLE_HDFS_INSTALL_DIR ) )
    .thenReturn( installPath );
  String installId = "install_id";
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_KETTLE_INSTALLATION_ID ) )
    .thenReturn( installId );
  when( fileSystem.asPath( installPath, installId ) ).thenReturn( kettleEnvInstallDir );
  when( distributedCacheUtil.isKettleEnvironmentInstalledAt( fileSystem, kettleEnvInstallDir ) ).thenReturn( false );
  String mapreduceClasspath = "mapreduceClasspath";
  when( conf.get( PentahoMapReduceJobBuilderImpl.MAPREDUCE_APPLICATION_CLASSPATH,
    PentahoMapReduceJobBuilderImpl.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH ) ).thenReturn( mapreduceClasspath );
  String archiveName = "archiveName";
  when( pmrArchiveGetter.getVfsFilename( conf ) ).thenReturn( archiveName );

  try {
    pentahoMapReduceJobBuilder.submit( conf, "" );
  } catch ( IOException e ) {
    assertEquals( BaseMessages.getString( PentahoMapReduceJobBuilderImpl.PKG,
      PentahoMapReduceJobBuilderImpl.JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_INSTALLATION_OF_KETTLE_FAILED ),
      e.getMessage() );
    assertEquals( BaseMessages.getString( PentahoMapReduceJobBuilderImpl.PKG,
      PentahoMapReduceJobBuilderImpl.JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_UNABLE_TO_LOCATE_ARCHIVE, archiveName )
      .trim(), e.getCause().getMessage().trim() );
    throw e;
  }
}
 
Example 6
Source File: MessagesStore.java    From pentaho-kettle with Apache License 2.0
/**
 * Find a suitable filename in which to save the messages for the specified locale and messages package. It needs a
 * source directory in which to save the file.
 *
 * @param directory
 *          the source directory to save the messages file in.
 * @return the filename that was generated.
 */
public String getSaveFilename( String directory ) {
  String localeUpperLower = locale.substring( 0, 3 ).toLowerCase() + locale.substring( 3 ).toUpperCase();

  String filename = "messages_" + localeUpperLower + ".properties";
  String path = messagesPackage.replace( '.', '/' );

  return directory
    + Const.FILE_SEPARATOR + path + Const.FILE_SEPARATOR + "messages" + Const.FILE_SEPARATOR + filename;
}
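For a concrete sense of the layout this produces, here is a small, self-contained sketch that builds the same kind of path for a hypothetical locale and messages package (the values are illustrative and do not come from the original class):

import org.pentaho.di.core.Const;

public class MessagesPathSketch {
  public static void main( String[] args ) {
    String directory = "src";
    String locale = "en_US";
    String messagesPackage = "org.pentaho.di.trans.steps.sample";

    // Same normalization as MessagesStore: lower-case language code, upper-case country code.
    String localeUpperLower = locale.substring( 0, 3 ).toLowerCase() + locale.substring( 3 ).toUpperCase();
    String filename = "messages_" + localeUpperLower + ".properties";
    String path = messagesPackage.replace( '.', '/' );

    // On Unix-like systems this prints:
    // src/org/pentaho/di/trans/steps/sample/messages/messages_en_US.properties
    System.out.println( directory
      + Const.FILE_SEPARATOR + path + Const.FILE_SEPARATOR + "messages" + Const.FILE_SEPARATOR + filename );
  }
}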
 
Example 7
Source File: JobEntryCopyMoveResultFilenames.java    From pentaho-kettle with Apache License 2.0
private boolean processFile( FileObject sourcefile, String destinationFolder, Result result, Job parentJob,
  boolean deleteFile ) {
  boolean retval = false;

  try {
    if ( deleteFile ) {
      // delete file
      if ( sourcefile.delete() ) {
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobEntryCopyMoveResultFilenames.log.DeletedFile", sourcefile.toString() ) );
        }

        // Remove source file from result files list
        result.getResultFiles().remove( sourcefile.toString() );
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobEntryCopyMoveResultFilenames.RemovedFileFromResult", sourcefile.toString() ) );
        }

      } else {
        logError( BaseMessages.getString( PKG, "JobEntryCopyMoveResultFilenames.CanNotDeletedFile", sourcefile
          .toString() ) );
      }
    } else {
      // return destination short filename
      String shortfilename = getDestinationFilename( sourcefile.getName().getBaseName() );
      // build full destination filename
      String destinationFilename = destinationFolder + Const.FILE_SEPARATOR + shortfilename;
      FileObject destinationfile = KettleVFS.getFileObject( destinationFilename, this );
      boolean filexists = destinationfile.exists();
      if ( filexists ) {
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobEntryCopyMoveResultFilenames.Log.FileExists", destinationFilename ) );
        }
      }
      if ( ( !filexists ) || ( filexists && isOverwriteFile() ) ) {
        if ( getAction().equals( "copy" ) ) {
          // Copy file
          FileUtil.copyContent( sourcefile, destinationfile );
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString(
              PKG, "JobEntryCopyMoveResultFilenames.log.CopiedFile", sourcefile.toString(), destinationFolder ) );
          }
        } else {
          // Move file
          sourcefile.moveTo( destinationfile );
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString(
              PKG, "JobEntryCopyMoveResultFilenames.log.MovedFile", sourcefile.toString(), destinationFolder ) );
          }
        }
        if ( isRemovedSourceFilename() ) {
          // Remove source file from result files list
          result.getResultFiles().remove( sourcefile.toString() );
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString(
              PKG, "JobEntryCopyMoveResultFilenames.RemovedFileFromResult", sourcefile.toString() ) );
          }
        }
        if ( isAddDestinationFilename() ) {
          // Add destination filename to Resultfilenames ...
          ResultFile resultFile =
            new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(
              destinationfile.toString(), this ), parentJob.getJobname(), toString() );
          result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString(
              PKG, "JobEntryCopyMoveResultFilenames.AddedFileToResult", destinationfile.toString() ) );
          }
        }
      }
    }
    retval = true;
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "JobEntryCopyMoveResultFilenames.Log.ErrorProcessing", e.toString() ) );
  }

  return retval;
}
 
Example 8
Source File: BasePluginType.java    From pentaho-kettle with Apache License 2.0
protected PluginInterface registerPluginFromXmlResource( Node pluginNode, String path,
  Class<? extends PluginTypeInterface> pluginType, boolean nativePlugin, URL pluginFolder ) throws KettlePluginException {
  try {

    String idAttr = XMLHandler.getTagAttribute( pluginNode, "id" );
    String description = getTagOrAttribute( pluginNode, "description" );
    String iconfile = getTagOrAttribute( pluginNode, "iconfile" );
    String tooltip = getTagOrAttribute( pluginNode, "tooltip" );
    String category = getTagOrAttribute( pluginNode, "category" );
    String classname = getTagOrAttribute( pluginNode, "classname" );
    String errorHelpfile = getTagOrAttribute( pluginNode, "errorhelpfile" );
    String documentationUrl = getTagOrAttribute( pluginNode, "documentation_url" );
    String casesUrl = getTagOrAttribute( pluginNode, "cases_url" );
    String forumUrl = getTagOrAttribute( pluginNode, "forum_url" );
    String suggestion = getTagOrAttribute( pluginNode, "suggestion" );

    Node libsnode = XMLHandler.getSubNode( pluginNode, "libraries" );
    int nrlibs = XMLHandler.countNodes( libsnode, "library" );

    List<String> jarFiles = new ArrayList<>();
    if ( path != null ) {
      for ( int j = 0; j < nrlibs; j++ ) {
        Node libnode = XMLHandler.getSubNodeByNr( libsnode, "library", j );
        String jarfile = XMLHandler.getTagAttribute( libnode, "name" );
        jarFiles.add( new File( path + Const.FILE_SEPARATOR + jarfile ).getAbsolutePath() );
      }
    }

    // Localized categories, descriptions and tool tips
    //
    Map<String, String> localizedCategories = readPluginLocale( pluginNode, "localized_category", "category" );
    category = getAlternativeTranslation( category, localizedCategories );

    Map<String, String> localDescriptions =
      readPluginLocale( pluginNode, "localized_description", "description" );
    description = getAlternativeTranslation( description, localDescriptions );
    description += addDeprecation( category );

    suggestion = getAlternativeTranslation( suggestion, localDescriptions );

    Map<String, String> localizedTooltips = readPluginLocale( pluginNode, "localized_tooltip", "tooltip" );
    tooltip = getAlternativeTranslation( tooltip, localizedTooltips );

    String iconFilename = ( path == null ) ? iconfile : path + Const.FILE_SEPARATOR + iconfile;
    String errorHelpFileFull = errorHelpfile;
    if ( !Utils.isEmpty( errorHelpfile ) ) {
      errorHelpFileFull = ( path == null ) ? errorHelpfile : path + Const.FILE_SEPARATOR + errorHelpfile;
    }

    Map<Class<?>, String> classMap = new HashMap<>();

    PluginMainClassType mainClassTypesAnnotation = pluginType.getAnnotation( PluginMainClassType.class );
    classMap.put( mainClassTypesAnnotation.value(), classname );

    // process annotated extra types
    PluginExtraClassTypes classTypesAnnotation = pluginType.getAnnotation( PluginExtraClassTypes.class );
    if ( classTypesAnnotation != null ) {
      for ( int i = 0; i < classTypesAnnotation.classTypes().length; i++ ) {
        Class<?> classType = classTypesAnnotation.classTypes()[i];
        String className = getTagOrAttribute( pluginNode, classTypesAnnotation.xmlNodeNames()[i] );

        classMap.put( classType, className );
      }
    }

    // process extra types added at runtime
    Map<Class<?>, String> objectMap = getAdditionalRuntimeObjectTypes();
    for ( Map.Entry<Class<?>, String> entry : objectMap.entrySet() ) {
      String clzName = getTagOrAttribute( pluginNode, entry.getValue() );
      classMap.put( entry.getKey(), clzName );
    }

    PluginInterface pluginInterface =
      new Plugin(
        idAttr.split( "," ), pluginType, mainClassTypesAnnotation.value(), category, description, tooltip,
        iconFilename, false, nativePlugin, classMap, jarFiles, errorHelpFileFull, pluginFolder,
        documentationUrl, casesUrl, forumUrl, suggestion );
    registry.registerPlugin( pluginType, pluginInterface );

    return pluginInterface;
  } catch ( Exception e ) {
    throw new KettlePluginException( BaseMessages.getString(
      PKG, "BasePluginType.RuntimeError.UnableToReadPluginXML.PLUGIN0001" ), e );
  }
}
 
Example 9
Source File: LanguageChoice.java    From pentaho-kettle with Apache License 2.0
public String getSettingsFilename() {
  return Const.getKettleDirectory() + Const.FILE_SEPARATOR + ".languageChoice";
}
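Const.getKettleDirectory() typically resolves to the user's Kettle settings directory (for example ~/.kettle, unless overridden through the KETTLE_HOME environment), so this method points at a hidden .languageChoice file inside it. A trivial, hypothetical sketch that prints the same path:

import org.pentaho.di.core.Const;

public class LanguageChoicePathSketch {
  public static void main( String[] args ) {
    // Typically something like /home/user/.kettle/.languageChoice on Linux.
    System.out.println( Const.getKettleDirectory() + Const.FILE_SEPARATOR + ".languageChoice" );
  }
}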
 
Example 10
Source File: LucidDBBulkLoader.java    From pentaho-kettle with Apache License 2.0
public boolean execute( LucidDBBulkLoaderMeta meta, boolean wait ) throws KettleException {
  Runtime rt = Runtime.getRuntime();

  try {
    String tableName = environmentSubstitute( meta.getTableName() );

    // 1) Set up the FIFO folder, create the directory and path to it...
    //
    String fifoVfsDirectory = environmentSubstitute( meta.getFifoDirectory() );
    FileObject directory = KettleVFS.getFileObject( fifoVfsDirectory, getTransMeta() );
    directory.createFolder();
    String fifoDirectory = KettleVFS.getFilename( directory );

    // 2) Create the FIFO file using the "mkfifo" command...
    // Make sure to log all the possible output, also from STDERR
    //
    data.fifoFilename = KettleVFS.getFilename( directory ) + Const.FILE_SEPARATOR + tableName + ".csv";
    data.bcpFilename = KettleVFS.getFilename( directory ) + Const.FILE_SEPARATOR + tableName + ".bcp";

    File fifoFile = new File( data.fifoFilename );
    if ( !fifoFile.exists() ) {
      String mkFifoCmd = "mkfifo " + data.fifoFilename + "";
      logBasic( "Creating FIFO file using this command : " + mkFifoCmd );
      Process mkFifoProcess = rt.exec( mkFifoCmd );
      StreamLogger errorLogger = new StreamLogger( log, mkFifoProcess.getErrorStream(), "mkFifoError" );
      StreamLogger outputLogger = new StreamLogger( log, mkFifoProcess.getInputStream(), "mkFifoOuptut" );
      new Thread( errorLogger ).start();
      new Thread( outputLogger ).start();
      int result = mkFifoProcess.waitFor();
      if ( result != 0 ) {
        throw new Exception( "Return code " + result + " received from statement : " + mkFifoCmd );
      }
    }

    // 3) Make a connection to LucidDB for sending SQL commands
    // (Also, we need a clear cache for getting up-to-date target metadata)
    DBCache.getInstance().clear( meta.getDatabaseMeta().getName() );
    if ( meta.getDatabaseMeta() == null ) {
      logError( BaseMessages.getString( PKG, "LuciDBBulkLoader.Init.ConnectionMissing", getStepname() ) );
      return false;
    }
    data.db = new Database( this, meta.getDatabaseMeta() );
    data.db.shareVariablesWith( this );
    // Connect to the database
    if ( getTransMeta().isUsingUniqueConnections() ) {
      synchronized ( getTrans() ) {
        data.db.connect( getTrans().getTransactionId(), getPartitionID() );
      }
    } else {
      data.db.connect( getPartitionID() );
    }

    logBasic( "Connected to LucidDB" );

    // 4) Now we are ready to create the LucidDB FIFO server that will handle the actual bulk loading.
    //
    String fifoServerStatement = "";
    fifoServerStatement += "create or replace server " + meta.getFifoServerName() + Const.CR;
    fifoServerStatement += "foreign data wrapper sys_file_wrapper" + Const.CR;
    fifoServerStatement += "options (" + Const.CR;
    fifoServerStatement += "directory '" + fifoDirectory + "'," + Const.CR;
    fifoServerStatement += "file_extension 'csv'," + Const.CR;
    fifoServerStatement += "with_header 'no'," + Const.CR;
    fifoServerStatement += "num_rows_scan '0'," + Const.CR;
    fifoServerStatement += "lenient 'no');" + Const.CR;

    logBasic( "Creating LucidDB fifo_server with the following command: " + fifoServerStatement );
    data.db.execStatements( fifoServerStatement );

    // 5) Set the error limit in the LucidDB session
    // REVIEW jvs 13-Dec-2008: is this guaranteed to retain the same
    // connection?
    String errorMaxStatement = "";
    errorMaxStatement += "alter session set \"errorMax\" = " + meta.getMaxErrors() + ";" + Const.CR;
    logBasic( "Setting error limit in LucidDB session with the following command: " + errorMaxStatement );
    data.db.execStatements( errorMaxStatement );

    // 6) Now we also need to create a bulk loader file .bcp
    //
    createBulkLoadConfigFile( data.bcpFilename );

    // 7) execute the actual load command!
    // This will actually block until the load is done in the
    // separate execution thread; see notes in executeLoadCommand
    // on why it's important for this to occur BEFORE
    // opening our end of the FIFO.
    //
    executeLoadCommand( tableName );

    // 8) We have to write rows to the FIFO file later on.
    data.fifoStream = new BufferedOutputStream( new FileOutputStream( fifoFile ) );
  } catch ( Exception ex ) {
    throw new KettleException( ex );
  }

  return true;
}
 
Example 11
Source File: JobEntryFTPTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void testTargetFilenameWithDateTime() throws Exception {
  SimpleDateFormat yyyyMMdd = new SimpleDateFormat( "yyyyMMdd" );
  SimpleDateFormat HHmmssSSS = new SimpleDateFormat( "HHmmssSSS" );
  SimpleDateFormat yyyyMMddHHmmssSSS = new SimpleDateFormat( "yyyyMMdd_HHmmssSSS" );
  File destFolder = tempFolder.newFolder( "pdi5558" );
  destFolder.deleteOnExit();
  String destFolderName = destFolder.getAbsolutePath();
  JobEntryFTP entry = new JobEntryFTP();
  entry.setTargetDirectory( destFolderName );
  entry.setAddDateBeforeExtension( true );

  //Test Date-Only
  entry.setDateInFilename( true );
  assertNull( entry.returnTargetFilename( null ) );
  assertEquals( "Test Add Date without file extension",
    destFolderName + Const.FILE_SEPARATOR + "testFile_" + yyyyMMdd.format( new Date() ),
    entry.returnTargetFilename( "testFile" ) );
  assertEquals( "Test Add Date with file extension",
    destFolderName + Const.FILE_SEPARATOR + "testFile_" + yyyyMMdd.format( new Date() ) + ".txt",
    entry.returnTargetFilename( "testFile.txt" ) );

  //Test Date-and-Time
  entry.setTimeInFilename( true );
  String beforeString = destFolderName + Const.FILE_SEPARATOR + "testFile_" + yyyyMMddHHmmssSSS.format( new Date() ) + ".txt";
  String actualValue = entry.returnTargetFilename( "testFile.txt" );
  String afterString = destFolderName + Const.FILE_SEPARATOR + "testFile_" + yyyyMMddHHmmssSSS.format( new Date() ) + ".txt";

  Pattern expectedFormat = Pattern.compile(
    Pattern.quote( destFolderName + Const.FILE_SEPARATOR + "testFile_" + yyyyMMdd.format( new Date() ) + "_" )
    + "([\\d]{9})\\.txt" );
  assertTrue( "Output file matches expected format", expectedFormat.matcher( actualValue ).matches() );
  assertTrue( "The actual time is not too early for test run", actualValue.compareTo( beforeString ) >= 0 );
  assertTrue( "The actual time is not too late for test run", actualValue.compareTo( afterString ) <= 0 );

  //Test Time-Only
  entry.setDateInFilename( false );
  beforeString = destFolderName + Const.FILE_SEPARATOR + "testFile_" + HHmmssSSS.format( new Date() ) + ".txt";
  actualValue = entry.returnTargetFilename( "testFile.txt" );
  afterString = destFolderName + Const.FILE_SEPARATOR + "testFile_" + HHmmssSSS.format( new Date() ) + ".txt";

  expectedFormat = Pattern.compile(
    Pattern.quote( destFolderName + Const.FILE_SEPARATOR + "testFile_" ) + "([\\d]{9})\\.txt" );
  assertTrue( "Output file matches expected format", expectedFormat.matcher( actualValue ).matches() );
  assertTrue( "The actual time is not too early for test run", actualValue.compareTo( beforeString ) >= 0 );
  assertTrue( "The actual time is not too late for test run", actualValue.compareTo( afterString ) <= 0 );
}
 
Example 12
Source File: JobEntryExportRepository.java    From pentaho-kettle with Apache License 2.0
private boolean processOneFolder( Job parentJob, Result result, LogChannelInterface log,
  RepositoryDirectoryInterface repdir, String realoutfilename, int folderno, int totalfolders ) {
  boolean retval = false;
  try {
    if ( !repdir.isRoot() ) {
      if ( repdir.toString().lastIndexOf( "/" ) == 0 ) {
        String filename = repdir.toString().replace( "/", "" );
        String foldername = realoutfilename;
        if ( newfolder ) {
          foldername = realoutfilename + Const.FILE_SEPARATOR + filename;
          this.file = KettleVFS.getFileObject( foldername, this );
          if ( !this.file.exists() ) {
            this.file.createFolder();
          }
        }

        filename = foldername + Const.FILE_SEPARATOR + buildFilename( filename ) + ".xml";
        this.file = KettleVFS.getFileObject( filename, this );

        if ( this.file.exists() ) {
          if ( iffileexists.equals( If_FileExists_Skip ) ) {
            // Skip this folder
            return true;
          } else if ( iffileexists.equals( If_FileExists_Uniquename ) ) {
            filename = realoutfilename + Const.FILE_SEPARATOR + buildUniqueFilename( filename ) + ".xml";
          } else if ( iffileexists.equals( If_FileExists_Fail ) ) {
            // Fail
            return false;
          }
        }

        // System.out.print(filename + "\n");
        if ( log.isDetailed() ) {
          logDetailed( "---" );
          logDetailed( BaseMessages.getString(
            PKG, "JobExportRepository.Log.FolderProcessing", "" + folderno, "" + totalfolders ) );
          logDetailed( BaseMessages.getString(
            PKG, "JobExportRepository.Log.OutFilename", repdir.toString(), filename ) );
        }

        new RepositoryExporter( this.repository ).exportAllObjects( null, filename, repdir, "all" );
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobExportRepository.Log.OutFilenameEnd", repdir.toString(), filename ) );
        }

        if ( add_result_filesname ) {
          addFileToResultFilenames( filename, log, result, parentJob );
        }

      }
    } // end if root
    retval = true;
  } catch ( Exception e ) {
    // Update errors
    updateErrors();
    logError( BaseMessages.getString( PKG, "JobExportRepository.ErrorExportingFolder", repdir.toString(), e
      .toString() ) );
  }
  return retval;
}
 
Example 13
Source File: JobEntryCopyFiles.java    From pentaho-kettle with Apache License 2.0
public boolean includeFile( FileSelectInfo info ) {
  boolean resultat = false;
  String fil_name = null;

  try {
    if ( info.getFile().getType() == FileType.FILE ) {
      if ( info.getFile().getName().getBaseName().equals( filename )
        && ( info.getFile().getParent().toString().equals( foldername ) ) ) {
        // check if the file exists
        fil_name = destfolder + Const.FILE_SEPARATOR + filename;

        if ( KettleVFS.getFileObject( fil_name, JobEntryCopyFiles.this ).exists() ) {
          if ( isDetailed() ) {
            logDetailed( "      " + BaseMessages.getString( PKG, "JobCopyFiles.Log.FileExists", KettleVFS.getFriendlyURI( fil_name ) ) );
          }

          if ( overwrite_files ) {
            if ( isDetailed() ) {
              logDetailed( "      "
                + BaseMessages.getString(
                  PKG, "JobCopyFiles.Log.FileOverwrite", KettleVFS.getFriendlyURI( info.getFile() ), KettleVFS.getFriendlyURI( fil_name ) ) );
            }

            resultat = true;
          }
        } else {
          if ( isDetailed() ) {
            logDetailed( "      "
              + BaseMessages.getString(
                PKG, "JobCopyFiles.Log.FileCopied", KettleVFS.getFriendlyURI( info.getFile() ), KettleVFS.getFriendlyURI( fil_name ) ) );
          }

          resultat = true;
        }
      }

      if ( resultat && remove_source_files ) {
        // add this folder/file to remove files
        // This list will be fetched and all entries files
        // will be removed
        list_files_remove.add( info.getFile().toString() );
      }

      if ( resultat && add_result_filesname ) {
        // add this folder/file to result files name
        list_add_result.add( KettleVFS.getFileObject( fil_name, JobEntryCopyFiles.this ).toString() );
      }
    }
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.CopyProcess", KettleVFS.getFriendlyURI( info
      .getFile() ), KettleVFS.getFriendlyURI( fil_name ), e.getMessage() ) );

    resultat = false;
  }

  return resultat;
}
 
Example 14
Source File: PentahoMapReduceJobBuilderImpl.java    From pentaho-hadoop-shims with Apache License 2.0
@Override
protected MapReduceJobAdvanced submit( Configuration conf, String shimIdentifier ) throws IOException {
  cleanOutputPath( conf );

  FileSystem fs = hadoopShim.getFileSystem( conf );

  if ( Boolean.parseBoolean( getProperty( conf, pmrProperties, PENTAHO_MAPREDUCE_PROPERTY_USE_DISTRIBUTED_CACHE,
    Boolean.toString( true ) ) ) ) {
    String installPath =
      getProperty( conf, pmrProperties, PENTAHO_MAPREDUCE_PROPERTY_KETTLE_HDFS_INSTALL_DIR, null );
    String mInstallId =
      getProperty( conf, pmrProperties, PENTAHO_MAPREDUCE_PROPERTY_KETTLE_INSTALLATION_ID, null );
    try {
      if ( Utils.isEmpty( installPath ) ) {
        throw new IllegalArgumentException( BaseMessages.getString( PKG,
          JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_KETTLE_HDFS_INSTALL_DIR_MISSING ) );
      }
      if ( Utils.isEmpty( mInstallId ) ) {
        mInstallId = this.installId;
      }
      if ( !installPath.endsWith( Const.FILE_SEPARATOR ) ) {
        installPath += Const.FILE_SEPARATOR;
      }

      Path kettleEnvInstallDir = fs.asPath( installPath, mInstallId );
      FileObject pmrLibArchive = pmrArchiveGetter.getPmrArchive( conf );

      // Make sure the version we're attempting to use is installed
      if ( hadoopShim.getDistributedCacheUtil().isKettleEnvironmentInstalledAt( fs, kettleEnvInstallDir ) ) {
        log.logDetailed( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.UsingKettleInstallationFrom",
          kettleEnvInstallDir.toUri().getPath() ) );
      } else {
        // Load additional plugin folders as requested
        String additionalPluginNames =
          getProperty( conf, pmrProperties, PENTAHO_MAPREDUCE_PROPERTY_ADDITIONAL_PLUGINS, null );
        if ( pmrLibArchive == null ) {
          throw new KettleException(
            BaseMessages.getString( PKG, JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_UNABLE_TO_LOCATE_ARCHIVE,
              pmrArchiveGetter.getVfsFilename( conf ) ) );
        }

        log.logBasic( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.InstallingKettleAt",
          kettleEnvInstallDir ) );

        FileObject bigDataPluginFolder = vfsPluginDirectory;
        hadoopShim.getDistributedCacheUtil()
          .installKettleEnvironment( pmrLibArchive, fs, kettleEnvInstallDir, bigDataPluginFolder,
            additionalPluginNames, shimIdentifier );

        log.logBasic( BaseMessages
          .getString( PKG, "JobEntryHadoopTransJobExecutor.InstallationOfKettleSuccessful", kettleEnvInstallDir ) );
      }

      stageMetaStoreForHadoop( conf, fs, installPath );

      log.logBasic( BaseMessages.getString( PKG, JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_CONFIGURING_JOB_WITH_KETTLE_AT,
        kettleEnvInstallDir.toUri().getPath() ) );

      String mapreduceClasspath =
        conf.get( MAPREDUCE_APPLICATION_CLASSPATH, DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH );
      conf.set( MAPREDUCE_APPLICATION_CLASSPATH, CLASSES + mapreduceClasspath );

      hadoopShim.getDistributedCacheUtil().configureWithKettleEnvironment( conf, fs, kettleEnvInstallDir );
      log.logBasic( MAPREDUCE_APPLICATION_CLASSPATH + ": " + conf.get( MAPREDUCE_APPLICATION_CLASSPATH ) );
    } catch ( Exception ex ) {
      throw new IOException(
        BaseMessages.getString( PKG, JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_INSTALLATION_OF_KETTLE_FAILED ), ex );
    }
  }
  JobConf jobConf = conf.getAsDelegateConf( JobConf.class );
  jobConf.getCredentials().addAll( UserGroupInformation.getCurrentUser().getCredentials() );
  return super.submit( conf, shimIdentifier );
}
 
Example 15
Source File: JobEntryFTPSGet.java    From pentaho-kettle with Apache License 2.0
/**
 * @param filename
 *          the filename from the FTPS server
 *
 * @return the calculated target filename
 */
private String returnTargetFilename( String filename ) {
  String retval = null;
  // Replace possible environment variables...
  if ( filename != null ) {
    retval = filename;
  } else {
    return null;
  }

  int lenstring = retval.length();
  int lastindexOfDot = retval.lastIndexOf( "." );
  if ( lastindexOfDot == -1 ) {
    lastindexOfDot = lenstring;
  }

  if ( isAddDateBeforeExtension() ) {
    retval = retval.substring( 0, lastindexOfDot );
  }

  SimpleDateFormat daf = new SimpleDateFormat();
  Date now = new Date();

  if ( SpecifyFormat && !Utils.isEmpty( date_time_format ) ) {
    daf.applyPattern( date_time_format );
    String dt = daf.format( now );
    retval += dt;
  } else {
    if ( adddate ) {
      daf.applyPattern( "yyyyMMdd" );
      String d = daf.format( now );
      retval += "_" + d;
    }
    if ( addtime ) {
      daf.applyPattern( "HHmmssSSS" );
      String t = daf.format( now );
      retval += "_" + t;
    }
  }

  if ( isAddDateBeforeExtension() ) {
    retval += retval.substring( lastindexOfDot, lenstring );
  }

  // Add foldername to filename
  retval = localFolder + Const.FILE_SEPARATOR + retval;
  return retval;
}
 
Example 16
Source File: JobEntryFTP.java    From pentaho-kettle with Apache License 2.0
/**
 * @param filename
 *          the filename from the FTP server
 *
 * @return the calculated target filename
 */
@VisibleForTesting
String returnTargetFilename( String filename ) {
  String retval = null;
  // Replace possible environment variables...
  if ( filename != null ) {
    retval = filename;
  } else {
    return null;
  }

  int lenstring = retval.length();
  int lastindexOfDot = retval.lastIndexOf( "." );
  if ( lastindexOfDot == -1 ) {
    lastindexOfDot = lenstring;
  }

  String fileExtension = retval.substring( lastindexOfDot, lenstring );

  if ( isAddDateBeforeExtension() ) {
    retval = retval.substring( 0, lastindexOfDot );
  }

  SimpleDateFormat daf = new SimpleDateFormat();
  Date now = new Date();

  if ( SpecifyFormat && !Utils.isEmpty( date_time_format ) ) {
    daf.applyPattern( date_time_format );
    String dt = daf.format( now );
    retval += dt;
  } else {
    if ( adddate ) {
      daf.applyPattern( "yyyyMMdd" );
      String d = daf.format( now );
      retval += "_" + d;
    }
    if ( addtime ) {
      daf.applyPattern( "HHmmssSSS" );
      String t = daf.format( now );
      retval += "_" + t;
    }
  }

  if ( isAddDateBeforeExtension() ) {
    retval += fileExtension;
  }

  // Add foldername to filename
  retval = environmentSubstitute( targetDirectory ) + Const.FILE_SEPARATOR + retval;
  return retval;
}
 
Example 17
Source File: JobEntryUnZip.java    From pentaho-kettle with Apache License 2.0
/**
 * Moving or deleting source file.
 */
private void doUnzipPostProcessing( FileObject sourceFileObject, FileObject movetodir, String realMovetodirectory ) throws FileSystemException {
  if ( afterunzip == 1 ) {
    // delete zip file
    boolean deleted = sourceFileObject.delete();
    if ( !deleted ) {
      updateErrors();
      logError( BaseMessages.getString( PKG, "JobUnZip.Cant_Delete_File.Label", sourceFileObject.toString() ) );
    }
    // File deleted
    if ( log.isDebug() ) {
      logDebug( BaseMessages.getString( PKG, "JobUnZip.File_Deleted.Label", sourceFileObject.toString() ) );
    }
  } else if ( afterunzip == 2 ) {
    FileObject destFile = null;
    // Move File
    try {
      String destinationFilename = movetodir + Const.FILE_SEPARATOR + sourceFileObject.getName().getBaseName();
      destFile = KettleVFS.getFileObject( destinationFilename, this );

      sourceFileObject.moveTo( destFile );

      // File moved
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobUnZip.Log.FileMovedTo", sourceFileObject.toString(), realMovetodirectory ) );
      }
    } catch ( Exception e ) {
      updateErrors();
      logError( BaseMessages.getString(
        PKG, "JobUnZip.Cant_Move_File.Label", sourceFileObject.toString(), realMovetodirectory, e
          .getMessage() ) );
    } finally {
      if ( destFile != null ) {
        try {
          destFile.close();
        } catch ( IOException ex ) { /* Ignore */
        }
      }
    }
  }
}
 
Example 18
Source File: PentahoMapReduceJobBuilderImplTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void testSubmitInstallSucceed()
  throws Exception {
  Configuration conf = mock( Configuration.class );
  JobConf jobConf = mock( JobConf.class );
  when( jobConf.getCredentials() ).thenReturn( new Credentials() );
  when( conf.getAsDelegateConf( any() ) ).thenReturn( jobConf );

  FileSystem fileSystem = mock( FileSystem.class );
  DistributedCacheUtil distributedCacheUtil = mock( DistributedCacheUtil.class );
  Path kettleEnvInstallDir = mock( Path.class );
  URI kettleEnvInstallDirUri = new URI( "http://testUri/path" );
  when( kettleEnvInstallDir.toUri() ).thenReturn( kettleEnvInstallDirUri );

  when( hadoopShim.getFileSystem( conf ) ).thenReturn( fileSystem );
  when( hadoopShim.getDistributedCacheUtil() ).thenReturn( distributedCacheUtil );
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_USE_DISTRIBUTED_CACHE ) )
    .thenReturn( "true" );
  String installPath = "/path" + Const.FILE_SEPARATOR;
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_KETTLE_HDFS_INSTALL_DIR ) )
    .thenReturn( installPath );
  String installId = "install_id";
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_KETTLE_INSTALLATION_ID ) )
    .thenReturn( installId );
  when( fileSystem.asPath( installPath, installId ) ).thenReturn( kettleEnvInstallDir );
  when( distributedCacheUtil.isKettleEnvironmentInstalledAt( fileSystem, kettleEnvInstallDir ) ).thenReturn( false )
    .thenReturn( true );
  String mapreduceClasspath = "mapreduceClasspath";
  when( conf.get( PentahoMapReduceJobBuilderImpl.MAPREDUCE_APPLICATION_CLASSPATH,
    PentahoMapReduceJobBuilderImpl.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH ) ).thenReturn( mapreduceClasspath );
  when( pmrArchiveGetter.getPmrArchive( conf ) ).thenReturn( mock( FileObject.class ) );

  pentahoMapReduceJobBuilder.submit( conf, "" );

  verify( logChannelInterface ).logBasic( BaseMessages.getString( PentahoMapReduceJobBuilderImpl.PKG,
    PentahoMapReduceJobBuilderImpl.JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_CONFIGURING_JOB_WITH_KETTLE_AT,
    kettleEnvInstallDirUri.getPath() ) );
  verify( conf ).set( PentahoMapReduceJobBuilderImpl.MAPREDUCE_APPLICATION_CLASSPATH,
    PentahoMapReduceJobBuilderImpl.CLASSES + mapreduceClasspath );
  verify( distributedCacheUtil ).configureWithKettleEnvironment( conf, fileSystem, kettleEnvInstallDir );
}
 
Example 19
Source File: PentahoMapReduceJobBuilderImplTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test( /*expected = IOException.class*/ )
public void testSubmitInstallFail()
  throws URISyntaxException, IOException, ConfigurationException, KettleFileException {
  Configuration conf = mock( Configuration.class );
  FileSystem fileSystem = mock( FileSystem.class );
  DistributedCacheUtil distributedCacheUtil = mock( DistributedCacheUtil.class );
  Path kettleEnvInstallDir = mock( Path.class );
  URI kettleEnvInstallDirUri = new URI( "http://testUri/path" );
  when( kettleEnvInstallDir.toUri() ).thenReturn( kettleEnvInstallDirUri );

  when( hadoopShim.getFileSystem( conf ) ).thenReturn( fileSystem );
  when( hadoopShim.getDistributedCacheUtil() ).thenReturn( distributedCacheUtil );
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_USE_DISTRIBUTED_CACHE ) )
    .thenReturn( "true" );
  String installPath = "/path";
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_KETTLE_HDFS_INSTALL_DIR ) )
    .thenReturn( installPath );
  JobConf jobConf = mock( JobConf.class );
  when( jobConf.getCredentials() ).thenReturn( new Credentials() );
  when( conf.getAsDelegateConf( any() ) ).thenReturn( jobConf );

  installPath += Const.FILE_SEPARATOR;
  String installId = "install_id";
  when( conf.get( PentahoMapReduceJobBuilderImpl.PENTAHO_MAPREDUCE_PROPERTY_KETTLE_INSTALLATION_ID ) )
    .thenReturn( installId );
  when( fileSystem.asPath( installPath, installId ) ).thenReturn( kettleEnvInstallDir );
  when( distributedCacheUtil.isKettleEnvironmentInstalledAt( fileSystem, kettleEnvInstallDir ) ).thenReturn( false );
  String mapreduceClasspath = "mapreduceClasspath";
  when( conf.get( PentahoMapReduceJobBuilderImpl.MAPREDUCE_APPLICATION_CLASSPATH,
    PentahoMapReduceJobBuilderImpl.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH ) ).thenReturn( mapreduceClasspath );
  when( pmrArchiveGetter.getPmrArchive( conf ) ).thenReturn( mock( FileObject.class ) );

  try {
    pentahoMapReduceJobBuilder.submit( conf, "" );
  } catch ( IOException e ) {
    assertEquals( BaseMessages.getString( PentahoMapReduceJobBuilderImpl.PKG,
      PentahoMapReduceJobBuilderImpl.JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_INSTALLATION_OF_KETTLE_FAILED ),
      e.getMessage() );
    assertEquals( BaseMessages.getString( PentahoMapReduceJobBuilderImpl.PKG,
      PentahoMapReduceJobBuilderImpl.JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_KETTLE_INSTALLATION_MISSING_FROM,
      kettleEnvInstallDirUri.getPath() ).trim(), e.getCause().getMessage().trim() );
    throw e;
  }
}
 
Example 20
Source File: PentahoMapReduceJobBuilderImpl.java    From pentaho-hadoop-shims with Apache License 2.0
public String getVfsFilename( Configuration conf ) {
  return pluginInterface.getPluginDirectory().getPath() + Const.FILE_SEPARATOR
    + getProperty( conf, pmrProperties, PENTAHO_MAPREDUCE_PROPERTY_PMR_LIBRARIES_ARCHIVE_FILE, null );
}