Java Code Examples for org.pentaho.di.core.logging.LogChannelInterface#logError()

The following examples show how to use org.pentaho.di.core.logging.LogChannelInterface#logError(). They are taken from real open source projects; the originating project and source file are noted above each example.
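
Before the project examples, here is a minimal sketch of the two logError() overloads the examples rely on: message-only, and message plus Throwable. The class name LogErrorSketch and the channel name "Sketch" are illustrative, and the sketch assumes the Kettle client environment has been initialized, as in Example 6.

import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;

public class LogErrorSketch {
  public static void main( String[] args ) throws KettleException {
    // Initialize the Kettle logging infrastructure (see Example 6).
    KettleClientEnvironment.init();

    // "Sketch" is an arbitrary subject name for this log channel.
    LogChannelInterface log = new LogChannel( "Sketch" );

    try {
      throw new IllegalStateException( "simulated failure" );
    } catch ( Exception e ) {
      // Message-only overload: details must be appended by hand.
      log.logError( "An unexpected error occurred : " + e.getMessage() );

      // Message-plus-Throwable overload: the channel records the exception.
      log.logError( "An unexpected error occurred", e );

      // Logging the stack trace explicitly, as Examples 1 and 6 do.
      log.logError( Const.getStackTracker( e ) );
    }
  }
}
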
Example 1
Source File: Translator.java    From pentaho-kettle with Apache License 2.0
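Catches any Throwable thrown by the SWT event loop of the Translator tool and logs both the message and the stack trace.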
public static void main( String[] args ) {
  Display display = new Display();
  LogChannelInterface log = new LogChannel( APP_NAME );
  PropsUI.init( display, Props.TYPE_PROPERTIES_SPOON );

  Translator translator = new Translator( display );
  translator.open();

  try {
    while ( !display.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
  } catch ( Throwable e ) {
    log.logError( "An unexpected error occurred : " + e.getMessage() );
    log.logError( Const.getStackTracker( e ) );
  }
}
 
Example 2
Source File: SlaveServerConfig.java    From pentaho-kettle with Apache License 2.0
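Logs the SocketException along with a message when the IP address of a configured network interface cannot be resolved for a slave server.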
private void checkNetworkInterfaceSetting( LogChannelInterface log, Node slaveNode, SlaveServer slaveServer ) {
  // See if we need to grab the network interface to use and then override the host name
  //
  String networkInterfaceName = XMLHandler.getTagValue( slaveNode, "network_interface" );
  if ( !Utils.isEmpty( networkInterfaceName ) ) {
    // OK, so let's try to get the IP address for this network interface...
    //
    try {
      String newHostname = Const.getIPAddress( networkInterfaceName );
      if ( newHostname != null ) {
        slaveServer.setHostname( newHostname );
        // Also change the name of the slave...
        //
        slaveServer.setName( slaveServer.getName() + "-" + newHostname );
        log.logBasic( "Hostname for slave server ["
          + slaveServer.getName() + "] is set to [" + newHostname + "], information derived from network "
          + networkInterfaceName );
      }
    } catch ( SocketException e ) {
      log.logError( "Unable to get the IP address for network interface "
        + networkInterfaceName + " for slave server [" + slaveServer.getName() + "]", e );
    }
  }

}
 
Example 3
Source File: PentahoReportingOutput.java    From pentaho-kettle with Apache License 2.0
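Boots the Pentaho Reporting engine and logs the boot failure reason if the engine reports one.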
public static void performPentahoReportingBoot( LogChannelInterface log, Class<?> referenceClass ) {
  // Boot the Pentaho reporting engine!
  //
  if ( ClassicEngineBoot.getInstance().isBootDone() == false ) {

    ObjectUtilities.setClassLoader( referenceClass.getClassLoader() );
    ObjectUtilities.setClassLoaderSource( ObjectUtilities.CLASS_CONTEXT );

    LibLoaderBoot.getInstance().start();
    LibFontBoot.getInstance().start();
    ClassicEngineBoot.getInstance().start();

    Exception exception = ClassicEngineBoot.getInstance().getBootFailureReason();
    if ( exception != null ) {
      log.logError( "Error booting the Pentaho reporting engine", exception );
    }

  }
}
 
Example 4
Source File: CassandraOutputData.java    From learning-hadoop with Apache License 2.0
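Logs a localized error and skips the row when its key is null or when it contains no non-null value besides the key.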
/**
 * Checks for null row key and rows with no non-null values
 * 
 * @param inputMeta
 *            the input row meta
 * @param keyIndex
 *            the index of the key field in the incoming row data
 * @param row
 *            the row to check
 * @param log
 *            logging
 * @return true if the row is OK
 * @throws KettleException
 *             if a problem occurs
 */
protected static boolean preAddChecks(RowMetaInterface inputMeta,
		int keyIndex, Object[] row, LogChannelInterface log)
		throws KettleException {
	// check the key first
	ValueMetaInterface keyMeta = inputMeta.getValueMeta(keyIndex);
	if (keyMeta.isNull(row[keyIndex])) {
		log.logError(BaseMessages.getString(CassandraOutputMeta.PKG,
				"CassandraOutput.Error.SkippingRowNullKey", row));
		return false;
	}

	// quick scan to see if we have at least one non-null value apart from
	// the key
	boolean ok = false;
	for (int i = 0; i < inputMeta.size(); i++) {
		if (i != keyIndex) {
			ValueMetaInterface v = inputMeta.getValueMeta(i);
			if (!v.isNull(row[i])) {
				ok = true;
				break;
			}
		}
	}
	if (!ok) {
		log.logError(BaseMessages.getString(CassandraOutputMeta.PKG,
				"CassandraOutput.Error.SkippingRowNoNonNullValues",
				keyMeta.getString(row[keyIndex])));
	}

	return ok;
}
 
Example 5
Source File: RepositoryExporterExtension.java    From pentaho-kettle with Apache License 2.0
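While exporting to a file repository, logs the underlying exception when a transformation referenced by a Metadata Injection step cannot be loaded.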
@Override
public void callExtensionPoint( LogChannelInterface log, Object object ) throws KettleException {

  Object[] metaInjectObjectArray = (Object[]) object;
  TransMeta transMeta = (TransMeta) metaInjectObjectArray[0];
  Class<?> PKG = (Class<?>) metaInjectObjectArray[1];
  KettleFileRepository fileRep = (KettleFileRepository) metaInjectObjectArray[2];
  StepMeta stepMeta = (StepMeta) metaInjectObjectArray[3];

  if ( stepMeta.isEtlMetaInject() ) {
    MetaInjectMeta metaInjectMeta = (MetaInjectMeta) stepMeta.getStepMetaInterface();
    // convert to a named based reference.
    //
    if ( metaInjectMeta.getSpecificationMethod() == ObjectLocationSpecificationMethod.FILENAME ) {
      try {
        TransMeta meta =
            MetaInjectMeta.loadTransformationMeta( metaInjectMeta, fileRep, fileRep.metaStore, transMeta );
        FileObject fileObject = KettleVFS.getFileObject( meta.getFilename() );
        metaInjectMeta.setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME );
        metaInjectMeta.setFileName( null );
        metaInjectMeta.setTransName( meta.getName() );
        metaInjectMeta.setDirectoryPath( Const.NVL( calcRepositoryDirectory( fileRep, fileObject ), "/" ) );
      } catch ( Exception e ) {
        log.logError( BaseMessages.getString( PKG, "Repository.Exporter.Log.UnableToLoadTransInMDI",
            metaInjectMeta.getName() ), e );
      }
    }
  }
}
 
Example 6
Source File: Translator2.java    From pentaho-kettle with Apache License 2.0
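Same pattern as Example 1, but the error message is localized through BaseMessages before the stack trace is logged.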
public static void main( String[] args ) throws Exception {

  if ( args.length != 2 ) {
    System.err.println( "Usage: Translator <translator.xml> <path-to-source>" );
    System.err.println( "Example:" );
    System.err.println( "sh translator.sh translator.xml ." );
    System.exit( 1 );
  }

  KettleClientEnvironment.init();

  String configFile = args[0];
  String sourceFolder = args[1];

  Display display = new Display();
  LogChannelInterface log = new LogChannel( APP_NAME );
  PropsUI.init( display, Props.TYPE_PROPERTIES_SPOON );

  Translator2 translator = new Translator2( display );
  translator.loadConfiguration( configFile, sourceFolder );
  translator.open();

  try {
    while ( !display.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
  } catch ( Throwable e ) {
    log.logError( BaseMessages.getString( PKG, "i18n.UnexpectedError", e.getMessage() ) );
    log.logError( Const.getStackTracker( e ) );
  }
}
 
Example 7
Source File: LdapProtocolFactory.java    From pentaho-kettle with Apache License 2.0
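Logs an error for each LDAP protocol class whose name cannot be resolved, while still returning the names that could be collected.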
/**
 * Returns the connection types understood by the factory
 *
 * @return the connection types understood by the factory
 * @throws KettleException
 */
public static final List<String> getConnectionTypes( LogChannelInterface log ) {
  List<String> result = new ArrayList<String>();
  synchronized ( protocols ) {
    for ( Class<? extends LdapProtocol> protocol : protocols ) {
      try {
        result.add( getName( protocol ) );
      } catch ( KettleException e ) {
        log.logError( "Unable to get name for " + protocol.getCanonicalName() );
      }
    }
  }
  return result;
}
 
Example 8
Source File: TransWebSocketEngineAdapter.java    From pentaho-kettle with Apache License 2.0
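Dispatches a LogEntry to the channel method that matches its log level; the ERROR case uses the Throwable overload when an exception is attached.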
private void logToChannel( LogChannelInterface logChannel, LogEntry data ) {
  LogLevel logLogLevel = data.getLogLogLevel();
  switch ( logLogLevel ) {
    case ERROR:
      if ( data.getThrowable() != null ) {
        logChannel.logError( data.getMessage(), data.getThrowable() );
      } else {
        logChannel.logError( data.getMessage() );
      }
      break;
    case MINIMAL:
      logChannel.logMinimal( data.getMessage() );
      break;
    case BASIC:
      logChannel.logBasic( data.getMessage() );
      break;
    case DETAILED:
      logChannel.logDetailed( data.getMessage() );
      break;
    case DEBUG:
      logChannel.logDebug( data.getMessage() );
      break;
    case TRACE:
      logChannel.logRowlevel( data.getMessage() );
      break;
  }
}
 
Example 9
Source File: JobEntryExportRepository.java    From pentaho-kettle with Apache License 2.0
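Logs a localized error when a file cannot be registered in the job's result filenames.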
private void addFileToResultFilenames( String fileaddentry, LogChannelInterface log, Result result, Job parentJob ) {
  try {
    ResultFile resultFile =
      new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( fileaddentry, this ), parentJob
        .getJobname(), toString() );
    result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
    if ( log.isDebug() ) {
      logDebug( BaseMessages.getString( PKG, "JobExportRepository.Log.FileAddedToResultFilesName", fileaddentry ) );
    }
  } catch ( Exception e ) {
    log.logError(
      BaseMessages.getString( PKG, "JobExportRepository.Error.AddingToFilenameResult" ), fileaddentry
        + "" + e.getMessage() );
  }
}
 
Example 10
Source File: WebServer.java    From pentaho-kettle with Apache License 2.0
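Logs a KettleException raised by the CarteStartup extension point without rethrowing it, so the Carte web server keeps running.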
public WebServer( LogChannelInterface log, TransformationMap transformationMap, JobMap jobMap,
    SocketRepository socketRepository, List<SlaveServerDetection> detections, String hostname, int port, boolean join,
    String passwordFile, SslConfiguration sslConfig ) throws Exception {
  this.log = log;
  this.transformationMap = transformationMap;
  this.jobMap = jobMap;
  this.socketRepository = socketRepository;
  this.detections = detections;
  this.hostname = hostname;
  this.port = port;
  this.passwordFile = passwordFile;
  this.sslConfig = sslConfig;

  startServer();

  // Start the monitoring of the registered slave servers...
  //
  startSlaveMonitoring();

  webServerShutdownHook = new WebServerShutdownHook( this );
  Runtime.getRuntime().addShutdownHook( webServerShutdownHook );

  try {
    ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.CarteStartup.id, this );
  } catch ( KettleException e ) {
    // Log error but continue regular operations to make sure Carte continues to run properly
    //
    log.logError( "Error calling extension point CarteStartup", e );
  }

  if ( join ) {
    server.join();
  }
}
 
Example 11
Source File: WriterAppenderManager.java    From pentaho-hadoop-shims with Apache License 2.0
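Logs a localized message and the stack trace when the file appender that captures Pig log output cannot be created.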
public WriterAppenderManager( LogChannelInterface logChannelInterface, LogLevel logLevel, String name,
                              LogWriter logWriter ) {
  // Set up an appender that will send all pig log messages to Kettle's log
  // via logBasic().
  KettleLoggingPrintWriter klps = new KettleLoggingPrintWriter( logChannelInterface );
  pigToKettleAppender = new WriterAppender( new Log4jKettleLayout( true ), klps );

  Logger pigLogger = Logger.getLogger( "org.apache.pig" );
  Level log4jLevel = getLog4jLevel( logLevel );
  pigLogger.setLevel( log4jLevel );
  String logFileName = "pdi-" + name; //$NON-NLS-1$
  Log4jFileAppender appender = null;
  this.logWriter = logWriter;
  try {
    appender = LogWriter.createFileAppender( logFileName, true, false );
    logWriter.addAppender( appender );
    logChannelInterface.setLogLevel( logLevel );
    if ( pigLogger != null ) {
      pigLogger.addAppender( pigToKettleAppender );
    }
  } catch ( Exception e ) {
    logChannelInterface.logError( BaseMessages
      .getString( PKG, "JobEntryPigScriptExecutor.FailedToOpenLogFile", logFileName, e.toString() ) ); //$NON-NLS-1$
    logChannelInterface.logError( Const.getStackTracker( e ) );
  }
  this.appender = appender;
}
 
Example 12
Source File: JobEntryWriteToLog.java    From pentaho-kettle with Apache License 2.0
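Routes a message to the channel method matching the entry's log level, and logs an error, setting the error count on the Result, if writing fails.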
/**
 * Output message to job log.
 */
public boolean evaluate( Result result ) {
  LogChannelInterface logChannel = createLogChannel();
  String message = getRealLogMessage();

  // Filter out empty messages and those that are not visible with the job's log level
  if ( Utils.isEmpty( message ) || !getEntryLogLevel().isVisible( logChannel.getLogLevel() ) ) {
    return true;
  }

  try {
    switch ( getEntryLogLevel() ) {
      case ERROR:
        logChannel.logError( message + Const.CR );
        break;
      case MINIMAL:
        logChannel.logMinimal( message + Const.CR );
        break;
      case BASIC:
        logChannel.logBasic( message + Const.CR );
        break;
      case DETAILED:
        logChannel.logDetailed( message + Const.CR );
        break;
      case DEBUG:
        logChannel.logDebug( message + Const.CR );
        break;
      case ROWLEVEL:
        logChannel.logRowlevel( message + Const.CR );
        break;
      default: // NOTHING
        break;
    }

    return true;
  } catch ( Exception e ) {
    result.setNrErrors( 1 );
    log.logError( BaseMessages.getString( PKG, "WriteToLog.Error.Label" ), BaseMessages.getString(
      PKG, "WriteToLog.Error.Description" )
      + " : " + e.toString() );
    return false;
  }

}
 
Example 13
Source File: BlackBoxIT.java    From pentaho-kettle with Apache License 2.0
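Logs an error, records a failure, and fails the test when the transformation under test does not exist.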
@Test
public void runTransOrJob() throws Exception {

  // Params are:
  // File transFile
  // List<File> expectedFiles

  LogChannelInterface log = new LogChannel( "BlackBoxTest [" + transFile.toString() + "]" );

  if ( !transFile.exists() ) {
    log.logError( "Transformation does not exist: " + getPath( transFile ) );
    addFailure( "Transformation does not exist: " + getPath( transFile ) );
    fail( "Transformation does not exist: " + getPath( transFile ) );
  }
  if ( expectedFiles.isEmpty() ) {
    addFailure( "No expected output files found: " + getPath( transFile ) );
    fail( "No expected output files found: " + getPath( transFile ) );
  }

  Result result = runTrans( transFile.getAbsolutePath(), log );

  // verify all the expected output files...
  //
  for ( int i = 0; i < expectedFiles.size(); i++ ) {

    File expected = expectedFiles.get( i );

    if ( expected.getAbsoluteFile().toString().contains( ".expected" ) ) {

      // create a path to the expected output
      String actualFile = expected.getAbsolutePath();
      actualFile = actualFile.replaceFirst( ".expected_" + i + ".", ".actual_" + i + "." ); // multiple files case
      actualFile = actualFile.replaceFirst( ".expected.", ".actual." ); // single file case
      File actual = new File( actualFile );
      if ( result.getResult() ) {
        fileCompare( expected, actual );
      }
    }
  }

  // We didn't get a result, so the only expected file should be a ".fail.txt" file
  //
  if ( !result.getResult() ) {
    String logStr = KettleLogStore.getAppender().getBuffer( result.getLogChannelId(), true ).toString();

    if ( expectedFiles.size() == 0 ) {
      // We haven't got a ".fail.txt" file, so this is a real failure
      fail( "Error running " + getPath( transFile ) + ":" + logStr );
    }
  }
}
 
Example 14
Source File: DatabaseLogExceptionFactory.java    From pentaho-kettle with Apache License 2.0
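Logs a localized error message built from a message key and its parameters.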
@Override
public void registerException( LogChannelInterface log, Class<?> packageClass, String key, String... parameters ) {
  log.logError( BaseMessages.getString( packageClass, key, parameters ) );
}
 
Example 15
Source File: DatabaseLogExceptionFactory.java    From pentaho-kettle with Apache License 2.0
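Variant of Example 14 that also passes the originating exception to logError().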
@Override public void registerException( LogChannelInterface log, Exception e, Class<?> packageClass, String key,
    String... parameters ) throws KettleDatabaseException {
  log.logError( BaseMessages.getString( packageClass, key, parameters ), e );
}
 
Example 16
Source File: DatabaseLogExceptionFactory.java    From pentaho-kettle with Apache License 2.0
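Variant that delegates to the message-only registration of Example 14 and then logs the exception's own message separately.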
@Override public void registerException( LogChannelInterface log, Exception e, Class<?> packageClass, String key,
    String... parameters ) throws KettleDatabaseException {
  registerException( log, packageClass, key, parameters );
  log.logError( e.getMessage() );
}
 
Example 17
Source File: ValidateTransUnitTestExtensionPoint.java    From pentaho-pdi-dataset with Apache License 2.0
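Validates transformation results against a unit test and logs the Throwable if the validation itself fails.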
@Override
public void callExtensionPoint( LogChannelInterface log, Object object ) throws KettleException {
  if ( !( object instanceof Trans ) ) {
    return;
  }

  final Trans trans = (Trans) object;
  final TransMeta transMeta = trans.getTransMeta();
  boolean runUnitTest = "Y".equalsIgnoreCase( transMeta.getVariable( DataSetConst.VAR_RUN_UNIT_TEST ) );
  if ( !runUnitTest ) {
    return;
  }

  // We should always have a unit test name here...
  String unitTestName = transMeta.getVariable( DataSetConst.VAR_UNIT_TEST_NAME );
  if ( StringUtil.isEmpty( unitTestName ) ) {
    return;
  }

  try {
    IMetaStore metaStore = transMeta.getMetaStore();
    Repository repository = transMeta.getRepository();

    if ( metaStore == null ) {
      return; // Nothing to do here, we can't reference data sets.
    }

    List<DatabaseMeta> databases = DataSetConst.getAvailableDatabases( repository, transMeta.getSharedObjects() );
    FactoriesHierarchy factoriesHierarchy = new FactoriesHierarchy( metaStore, databases );

    // If the transformation has a variable set with the unit test in it, we're dealing with a unit test situation.
    //
    TransUnitTest unitTest = factoriesHierarchy.getTestFactory().loadElement( unitTestName );

    final List<UnitTestResult> results = new ArrayList<UnitTestResult>();
    trans.getExtensionDataMap().put( DataSetConst.UNIT_TEST_RESULTS, results );


    // Validate execution results with what's in the data sets...
    //
    int errors = DataSetConst.validateTransResultAgainstUnitTest( trans, unitTest, factoriesHierarchy, results );
    if ( errors == 0 ) {
      log.logBasic( "Unit test '" + unitTest.getName() + "' passed succesfully" );
    } else {
      log.logBasic( "Unit test '" + unitTest.getName() + "' failed, " + errors + " errors detected, " + results.size() + " comments to report." );

      String dontShowResults = transMeta.getVariable( DataSetConst.VAR_DO_NOT_SHOW_UNIT_TEST_ERRORS, "N" );

      final Spoon spoon = Spoon.getInstance();
      if ( spoon != null && "N".equalsIgnoreCase( dontShowResults ) ) {
        spoon.getShell().getDisplay().asyncExec( new Runnable() {
          @Override
          public void run() {
            PreviewRowsDialog dialog = new PreviewRowsDialog( spoon.getShell(), trans, SWT.NONE,
              "Unit test results",
              UnitTestResult.getRowMeta(),
              UnitTestResult.getRowData( results ) );
            dialog.setDynamic( false );
            dialog.setProposingToGetMoreRows( false );
            dialog.setProposingToStop( false );
            dialog.setTitleMessage( "Unit test results", "Here are the results of the unit test validations:" );
            dialog.open();
          }
        } );
      }
    }
    log.logBasic( "----------------------------------------------" );
    for ( UnitTestResult result : results ) {
      if ( result.getDataSetName() != null ) {
        log.logBasic( result.getStepName() + " - " + result.getDataSetName() + " : " + result.getComment() );
      } else {
        log.logBasic( result.getComment() );
      }
    }
    log.logBasic( "----------------------------------------------" );
  } catch ( Throwable e ) {
    log.logError( "Unable to validate unit test/golden rows", e );
  }

}
 
Example 18
Source File: PurRepository.java    From pentaho-kettle with Apache License 2.0
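Logs the load error and an explanatory message for each transformation that cannot be read, then continues with the remaining files.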
/**
 * Load all transformations referenced by {@code files}.
 *
 * @param monitor
 * @param log
 * @param files
 *          Transformation files to load.
 * @param setInternalVariables
 *          Should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
 * @return Loaded transformations
 * @throws KettleException
 *           Error loading data for transformations from repository
 */
protected List<TransMeta> loadTransformations( final ProgressMonitorListener monitor, final LogChannelInterface log,
                                               final List<RepositoryFile> files, final boolean setInternalVariables )
  throws KettleException {
  List<TransMeta> transformations = new ArrayList<TransMeta>( files.size() );

  readWriteLock.readLock().lock();
  List<NodeRepositoryFileData> filesData;
  List<VersionSummary> versions;
  try {
    filesData = pur.getDataForReadInBatch( files, NodeRepositoryFileData.class );
    versions = pur.getVersionSummaryInBatch( files );
  } finally {
    readWriteLock.readLock().unlock();
  }

  Iterator<RepositoryFile> filesIter = files.iterator();
  Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
  Iterator<VersionSummary> versionsIter = versions.iterator();
  while ( ( monitor == null || !monitor.isCanceled() ) && filesIter.hasNext() ) {
    RepositoryFile file = filesIter.next();
    NodeRepositoryFileData fileData = filesDataIter.next();
    VersionSummary version = versionsIter.next();
    String
      dirPath =
      file.getPath().substring( 0, file.getPath().lastIndexOf( RepositoryDirectory.DIRECTORY_SEPARATOR ) );
    try {
      log.logDetailed( "Loading/Exporting transformation [{0} : {1}]  ({2})", dirPath, file.getTitle(), file
        .getPath() ); //$NON-NLS-1$
      if ( monitor != null ) {
        monitor.subTask( "Exporting transformation [" + file.getPath() + "]" ); //$NON-NLS-1$ //$NON-NLS-2$
      }
      TransMeta
        transMeta =
        buildTransMeta( file, findDirectory( dirPath ), fileData, createObjectRevision( version ) );
      ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationMetaLoaded.id, transMeta );
      transformations.add( transMeta );
    } catch ( Exception ex ) {
      log.logDetailed( "Unable to load transformation [" + file.getPath() + "]", ex ); //$NON-NLS-1$ //$NON-NLS-2$
      log.logError( "An error occurred reading transformation [" + file.getTitle() + "] from directory [" + dirPath
        + "] : " + ex.getMessage() ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
      log.logError( "Transformation [" + file.getTitle() + "] from directory [" + dirPath
        + "] was not exported because of a loading error!" ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    }
  }
  return transformations;
}
 
Example 19
Source File: PurRepository.java    From pentaho-kettle with Apache License 2.0
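Logs the exception for each job that cannot be loaded and continues with the remaining files.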
/**
 * Load all jobs referenced by {@code files}.
 *
 * @param monitor
 * @param log
 * @param files
 *          Job files to load.
 * @param setInternalVariables
 *          Should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
 * @return Loaded jobs
 * @throws KettleException
 *           Error loading data for jobs from repository
 */
protected List<JobMeta> loadJobs( final ProgressMonitorListener monitor, final LogChannelInterface log,
                                  final List<RepositoryFile> files, final boolean setInternalVariables )
  throws KettleException {
  List<JobMeta> jobs = new ArrayList<JobMeta>( files.size() );

  readWriteLock.readLock().lock();
  List<NodeRepositoryFileData> filesData;
  List<VersionSummary> versions;
  try {
    filesData = pur.getDataForReadInBatch( files, NodeRepositoryFileData.class );
    versions = pur.getVersionSummaryInBatch( files );
  } finally {
    readWriteLock.readLock().unlock();
  }

  Iterator<RepositoryFile> filesIter = files.iterator();
  Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
  Iterator<VersionSummary> versionsIter = versions.iterator();
  while ( ( monitor == null || !monitor.isCanceled() ) && filesIter.hasNext() ) {
    RepositoryFile file = filesIter.next();
    NodeRepositoryFileData fileData = filesDataIter.next();
    VersionSummary version = versionsIter.next();
    try {
      String
        dirPath =
        file.getPath().substring( 0, file.getPath().lastIndexOf( RepositoryDirectory.DIRECTORY_SEPARATOR ) );
      log.logDetailed( "Loading/Exporting job [{0} : {1}]  ({2})", dirPath, file.getTitle(),
        file.getPath() ); //$NON-NLS-1$
      if ( monitor != null ) {
        monitor.subTask( "Exporting job [" + file.getPath() + "]" ); //$NON-NLS-1$ //$NON-NLS-2$
      }
      JobMeta jobMeta = buildJobMeta( file, findDirectory( dirPath ), fileData, createObjectRevision( version ) );
      ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.JobMetaLoaded.id, jobMeta );
      jobs.add( jobMeta );
    } catch ( Exception ex ) {
      log.logError( "Unable to load job [" + file.getPath() + "]", ex ); //$NON-NLS-1$ //$NON-NLS-2$
    }
  }
  return jobs;

}