Java Code Examples for org.pentaho.di.core.Result#getRows()

The following examples show how to use org.pentaho.di.core.Result#getRows(). You can vote up the examples you find useful or vote down those you don't, and you can open the original project or source file by following the links above each example. Related API usage is listed in the sidebar.
Example 1
Source File: JobEntryCheckFilesLocked.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Checks the configured files for locks. File/mask pairs come either from the rows of the
 * previous job entry (argFromPrevious) or from this entry's own arguments/filemasks arrays.
 *
 * @param previousResult result of the previous job entry; re-used as this entry's result
 * @param nr             the job entry number (unused here)
 * @return previousResult, with result=false and nrErrors=1 when at least one file was locked
 */
public Result execute( Result previousResult, int nr ) {

  // Carry the incoming result forward and start from a "success" state.
  Result result = previousResult;
  List<RowMetaAndData> previousRows = result.getRows();

  oneFileLocked = false;
  result.setResult( true );

  try {
    if ( argFromPrevious && isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntryCheckFilesLocked.FoundPreviousRows", String
        .valueOf( ( previousRows != null ? previousRows.size() : 0 ) ) ) );
    }

    if ( argFromPrevious && previousRows != null ) {
      // File/mask pairs are taken from the rows of the previous job entry.
      processFromPreviousArgument( previousRows );
    } else if ( arguments != null ) {
      // File/mask pairs come from this entry's own configuration.
      int idx = 0;
      while ( idx < arguments.length && !parentJob.isStopped() ) {
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobEntryCheckFilesLocked.ProcessingArg", arguments[idx], filemasks[idx] ) );
        }

        processFile( arguments[idx], filemasks[idx] );
        idx++;
      }
    }

    if ( oneFileLocked ) {
      // At least one file was locked: flag the entry as failed.
      result.setResult( false );
      result.setNrErrors( 1 );
    }
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "JobEntryCheckFilesLocked.ErrorRunningJobEntry", e ) );
  }

  return result;
}
 
Example 2
Source File: TransExecutor.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@VisibleForTesting
/**
 * Copies the rows collected in the sub-transformation's {@code Result} into the row set
 * feeding the configured output-rows target step. Does nothing when no target step is
 * configured or no row set is available.
 *
 * @param result the sub-transformation result whose rows are forwarded
 * @throws KettleException if a row cannot be written to the target row set
 */
void collectTransResults( Result result ) throws KettleException {
  RowSet targetRowSet = getData().getResultRowsRowSet();
  // Guard clauses: nothing to forward without both a target step and a row set.
  if ( meta.getOutputRowsSourceStepMeta() == null || targetRowSet == null ) {
    return;
  }
  for ( RowMetaAndData row : result.getRows() ) {
    putRowTo( row.getRowMeta(), row.getData(), targetRowSet );
  }
}
 
Example 3
Source File: JobEntryDosToUnix.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Converts line endings for the configured files/folders. Source/wildcard pairs come either
 * from the rows of the previous job entry (arg_from_previous) or from this entry's own
 * source_filefolder/wildcard arrays. Processing stops early once the success condition is
 * broken; final counters are published on the result either way.
 *
 * @param previousResult result of the previous job entry; re-used as this entry's result
 * @param nr             the job entry number (unused here)
 * @return previousResult with nrErrors/nrLinesRejected/nrLinesWritten/result updated
 * @throws KettleException on unrecoverable processing errors
 */
public Result execute( Result previousResult, int nr ) throws KettleException {
  Result result = previousResult;
  // Pessimistic defaults; overwritten at the end when the success condition holds.
  result.setNrErrors( 1 );
  result.setResult( false );

  List<RowMetaAndData> rows = previousResult.getRows();
  RowMetaAndData resultRow = null;

  // Reset per-run counters and the error limit.
  nrErrors = 0;
  nrProcessedFiles = 0;
  nrErrorFiles = 0;
  limitFiles = Const.toInt( environmentSubstitute( getNrErrorsLessThan() ), 10 );
  successConditionBroken = false;
  successConditionBrokenExit = false;
  tempFolder = environmentSubstitute( "%%java.io.tmpdir%%" );

  // Get source and destination files, also wildcard
  String[] vsourcefilefolder = source_filefolder;
  String[] vwildcard = wildcard;

  if ( arg_from_previous ) {
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobDosToUnix.Log.ArgFromPrevious.Found", ( rows != null ? rows
        .size() : 0 )
        + "" ) );
    }

  }
  if ( arg_from_previous && rows != null ) {
    // Copy the input row to the (command line) arguments
    for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
      if ( successConditionBroken ) {
        // Error limit reached: log once, publish counters and bail out early.
        if ( !successConditionBrokenExit ) {
          logError( BaseMessages.getString( PKG, "JobDosToUnix.Error.SuccessConditionbroken", "" + nrAllErrors ) );
          successConditionBrokenExit = true;
        }
        result.setEntryNr( nrAllErrors );
        result.setNrLinesRejected( nrErrorFiles );
        result.setNrLinesWritten( nrProcessedFiles );
        return result;
      }

      resultRow = rows.get( iteration );

      // Row layout: 0 = source file/folder, 1 = wildcard, 2 = conversion type code.
      String vsourcefilefolder_previous = resultRow.getString( 0, null );
      String vwildcard_previous = resultRow.getString( 1, null );
      int convertion_type = JobEntryDosToUnix.getConversionTypeByCode( resultRow.getString( 2, null ) );

      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobDosToUnix.Log.ProcessingRow", vsourcefilefolder_previous, vwildcard_previous ) );
      }

      processFileFolder( vsourcefilefolder_previous, vwildcard_previous, convertion_type, parentJob, result );
    }
  } else if ( vsourcefilefolder != null ) {
    // Sources come from this entry's own configuration arrays.
    for ( int i = 0; i < vsourcefilefolder.length && !parentJob.isStopped(); i++ ) {
      if ( successConditionBroken ) {
        if ( !successConditionBrokenExit ) {
          logError( BaseMessages.getString( PKG, "JobDosToUnix.Error.SuccessConditionbroken", "" + nrAllErrors ) );
          successConditionBrokenExit = true;
        }
        result.setEntryNr( nrAllErrors );
        result.setNrLinesRejected( nrErrorFiles );
        result.setNrLinesWritten( nrProcessedFiles );
        return result;
      }

      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobDosToUnix.Log.ProcessingRow", vsourcefilefolder[i], vwildcard[i] ) );
      }

      processFileFolder( vsourcefilefolder[i], vwildcard[i], conversionTypes[i], parentJob, result );

    }
  }

  // Success Condition
  result.setNrErrors( nrAllErrors );
  result.setNrLinesRejected( nrErrorFiles );
  result.setNrLinesWritten( nrProcessedFiles );
  if ( getSuccessStatus() ) {
    // Success condition holds: clear the error flag set pessimistically above.
    result.setNrErrors( 0 );
    result.setResult( true );
  }

  displayResults();

  return result;
}
 
Example 4
Source File: JobEntryTruncateTables.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Truncates database tables. Table/schema names come either from the rows of the previous
 * job entry (argFromPrevious) or from this entry's own arguments/schemaname arrays.
 *
 * @param previousResult result of the previous job entry; re-used as this entry's result
 * @param nr             the job entry number (unused here)
 * @return previousResult with nrErrors/nrLinesDeleted/result updated
 */
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;
  List<RowMetaAndData> rows = result.getRows();
  RowMetaAndData resultRow = null;

  result.setResult( true );
  nrErrors = 0;
  continueProcess = true;
  nrSuccess = 0;

  if ( argFromPrevious ) {
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntryTruncateTables.FoundPreviousRows", String
        .valueOf( ( rows != null ? rows.size() : 0 ) ) ) );
    }
    // FIX: the original called rows.size() here without a null check and threw a
    // NullPointerException when the previous entry supplied no row set at all (the
    // logging above already treats rows == null as a possible state).
    if ( rows == null || rows.isEmpty() ) {
      return result;
    }
  }
  if ( connection != null ) {
    Database db = new Database( this, connection );
    db.shareVariablesWith( this );
    try {
      db.connect( parentJob.getTransactionId(), null );
      if ( argFromPrevious && rows != null ) { // Copy the input row to the (command line) arguments

        for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped() && continueProcess; iteration++ ) {
          resultRow = rows.get( iteration );

          // Row layout: 0 = table name, 1 = schema name.
          String tablename_previous = resultRow.getString( 0, null );
          String schemaname_previous = resultRow.getString( 1, null );

          if ( !Utils.isEmpty( tablename_previous ) ) {
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString(
                PKG, "JobEntryTruncateTables.ProcessingRow", tablename_previous, schemaname_previous ) );
            }

            // let's truncate table
            if ( truncateTables( tablename_previous, schemaname_previous, db ) ) {
              updateSuccess();
            } else {
              updateErrors();
            }
          } else {
            logError( BaseMessages.getString( PKG, "JobEntryTruncateTables.RowEmpty" ) );
          }
        }

      } else if ( arguments != null ) {
        for ( int i = 0; i < arguments.length && !parentJob.isStopped() && continueProcess; i++ ) {
          String realTablename = environmentSubstitute( arguments[i] );
          String realSchemaname = environmentSubstitute( schemaname[i] );
          if ( !Utils.isEmpty( realTablename ) ) {
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString(
                PKG, "JobEntryTruncateTables.ProcessingArg", arguments[i], schemaname[i] ) );
            }

            // let's truncate table
            if ( truncateTables( realTablename, realSchemaname, db ) ) {
              updateSuccess();
            } else {
              updateErrors();
            }
          } else {
            logError( BaseMessages.getString(
              PKG, "JobEntryTruncateTables.ArgEmpty", arguments[i], schemaname[i] ) );
          }
        }
      }
    } catch ( Exception dbe ) {
      result.setNrErrors( 1 );
      logError( BaseMessages.getString( PKG, "JobEntryTruncateTables.Error.RunningEntry", dbe.getMessage() ) );
    } finally {
      // db is always non-null here, but keep the defensive check from the original.
      if ( db != null ) {
        db.disconnect();
      }
    }
  } else {
    result.setNrErrors( 1 );
    logError( BaseMessages.getString( PKG, "JobEntryTruncateTables.NoDbConnection" ) );
  }

  result.setNrErrors( nrErrors );
  result.setNrLinesDeleted( nrSuccess );
  result.setResult( nrErrors == 0 );
  return result;
}
 
Example 5
Source File: JobEntryDeleteFolders.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Deletes the configured folders. Folder names come either from the rows of the previous
 * job entry (argFromPrevious, column 0) or from this entry's own arguments array. Stops
 * early once the success condition is broken.
 *
 * @param result the incoming/outgoing result; nrErrors/nrLinesDeleted/result are updated
 * @param nr     the job entry number (unused here)
 * @return the same result object, updated with counters and the success flag
 * @throws KettleException on unrecoverable processing errors
 */
public Result execute( Result result, int nr ) throws KettleException {
  List<RowMetaAndData> rows = result.getRows();

  // Pessimistic defaults; cleared at the end when the success status holds.
  result.setNrErrors( 1 );
  result.setResult( false );

  NrErrors = 0;
  NrSuccess = 0;
  successConditionBroken = false;
  successConditionBrokenExit = false;
  limitFolders = Const.toInt( environmentSubstitute( getLimitFolders() ), 10 );

  // Pass the embedded named-cluster metastore provider key on to VFS.
  if ( parentJobMeta.getNamedClusterEmbedManager() != null ) {
    parentJobMeta.getNamedClusterEmbedManager()
      .passEmbeddedMetastoreKey( this, parentJobMeta.getEmbeddedMetastoreProviderKey() );
  }

  if ( argFromPrevious ) {
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFolders.FoundPreviousRows", String
        .valueOf( ( rows != null ? rows.size() : 0 ) ) ) );
    }
  }

  if ( argFromPrevious && rows != null ) {
    // Folder names come from column 0 of the previous entry's rows.
    for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
      if ( successConditionBroken ) {
        // Error limit reached: publish counters and bail out early.
        logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Error.SuccessConditionbroken", ""
          + NrErrors ) );
        result.setNrErrors( NrErrors );
        result.setNrLinesDeleted( NrSuccess );
        return result;
      }
      RowMetaAndData resultRow = rows.get( iteration );
      String args_previous = resultRow.getString( 0, null );
      if ( !Utils.isEmpty( args_previous ) ) {
        if ( deleteFolder( args_previous ) ) {
          updateSuccess();
        } else {
          updateErrors();
        }
      } else {
        // empty filename !
        logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Error.EmptyLine" ) );
      }
    }
  } else if ( arguments != null ) {
    // Folder names come from this entry's own configuration.
    for ( int i = 0; i < arguments.length && !parentJob.isStopped(); i++ ) {
      if ( successConditionBroken ) {
        logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Error.SuccessConditionbroken", ""
          + NrErrors ) );
        result.setNrErrors( NrErrors );
        result.setNrLinesDeleted( NrSuccess );
        return result;
      }
      String realfilename = environmentSubstitute( arguments[i] );
      if ( !Utils.isEmpty( realfilename ) ) {
        if ( deleteFolder( realfilename ) ) {
          updateSuccess();
        } else {
          updateErrors();
        }
      } else {
        // empty filename !
        logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Error.EmptyLine" ) );
      }
    }
  }

  if ( log.isDetailed() ) {
    logDetailed( "=======================================" );
    logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Log.Info.NrError", "" + NrErrors ) );
    logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Log.Info.NrDeletedFolders", "" + NrSuccess ) );
    logDetailed( "=======================================" );
  }

  result.setNrErrors( NrErrors );
  result.setNrLinesDeleted( NrSuccess );
  if ( getSuccessStatus() ) {
    result.setResult( true );
  }

  return result;
}
 
Example 6
Source File: JobEntryDeleteFiles.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Deletes the configured files. Path/mask pairs are gathered by
 * {@code populateDataForJobExecution} (from previous-entry rows and/or this entry's
 * configuration) and processed one by one; empty paths are skipped (see PDI-15181).
 *
 * @param result the incoming/outgoing result; nrErrors and the success flag are updated
 * @param nr     the job entry number (unused here)
 * @return the same result object, success when no file failed to delete
 * @throws KettleException on unrecoverable processing errors
 */
public Result execute( Result result, int nr ) throws KettleException {
  List<RowMetaAndData> previousRows = result.getRows();

  int failedFiles = 0;
  // Pessimistic defaults; cleared below when nothing failed.
  result.setResult( false );
  result.setNrErrors( 1 );

  if ( argFromPrevious && log.isDetailed() ) {
    logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.FoundPreviousRows", String
      .valueOf( ( previousRows != null ? previousRows.size() : 0 ) ) ) );
  }

  // Pass the embedded named-cluster metastore provider key on to VFS.
  if ( parentJobMeta.getNamedClusterEmbedManager() != null ) {
    parentJobMeta.getNamedClusterEmbedManager()
      .passEmbeddedMetastoreKey( this, parentJobMeta.getEmbeddedMetastoreProviderKey() );
  }

  Multimap<String, String> pathsToMasks = populateDataForJobExecution( previousRows );

  for ( Map.Entry<String, String> entry : pathsToMasks.entries() ) {
    final String path = environmentSubstitute( entry.getKey() );
    if ( path.trim().isEmpty() ) {
      // Relative paths are permitted, and an empty path would mean deleting everything
      // inside the root pdi-folder. That is far more likely a mistake than intended, so
      // nothing is deleted in that case (see PDI-15181).
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.NoPathProvided" ) );
      }
      continue;
    }

    final String mask = environmentSubstitute( entry.getValue() );

    if ( parentJob.isStopped() ) {
      break;
    }

    if ( !processFile( path, mask, parentJob ) ) {
      failedFiles++;
    }
  }

  if ( failedFiles == 0 ) {
    result.setResult( true );
    result.setNrErrors( 0 );
  } else {
    result.setNrErrors( failedFiles );
    result.setResult( false );
  }

  return result;
}
 
Example 7
Source File: JobEntryMSAccessBulkLoad.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Bulk-loads files into MS Access databases. Source/wildcard/delimiter/target values come
 * either from the rows of the previous job entry (is_args_from_previous) or from this
 * entry's own configuration arrays; each tuple is handed to processOneRow.
 *
 * @param previousResult result of the previous job entry; re-used as this entry's result
 * @param nr             the job entry number (unused here)
 * @return previousResult with nrErrors/nrLinesInput/nrLinesWritten/result updated
 */
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;

  List<RowMetaAndData> rows = result.getRows();
  RowMetaAndData resultRow = null;
  result.setResult( false );

  // Reset per-run counters and the error limit.
  NrErrors = 0;
  NrSuccess = 0;
  NrFilesToProcess = 0;
  continueProcessing = true;
  limitFiles = Const.toInt( environmentSubstitute( getLimit() ), 10 );

  // Get source and destination files, also wildcard
  String[] vsourceFilefolder = source_filefolder;
  String[] vsourceWildcard = source_wildcard;
  String[] vsourceDelimiter = delimiter;
  String[] targetDb = target_Db;
  String[] targetTable = target_table;

  try {

    if ( is_args_from_previous ) {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobEntryMSAccessBulkLoad.Log.ArgFromPrevious.Found", ( rows != null ? rows.size() : 0 ) + "" ) );
      }
    }
    if ( is_args_from_previous && rows != null ) {
      for ( int iteration = 0; iteration < rows.size()
        && !parentJob.isStopped()
        && continueProcessing; iteration++ ) {
        resultRow = rows.get( iteration );

        // Row layout: 0 = source file/folder, 1 = wildcard, 2 = delimiter,
        // 3 = target database, 4 = target table.
        String vSourceFileFolder_previous = resultRow.getString( 0, null );
        String vSourceWildcard_previous = resultRow.getString( 1, null );
        String vDelimiter_previous = resultRow.getString( 2, null );
        String vTargetDb_previous = resultRow.getString( 3, null );
        String vTargetTable_previous = resultRow.getString( 4, null );

        processOneRow(
          vSourceFileFolder_previous, vSourceWildcard_previous, vDelimiter_previous, vTargetDb_previous,
          vTargetTable_previous, parentJob, result );

      }
    } else if ( vsourceFilefolder != null && targetDb != null && targetTable != null ) {
      // Values come from this entry's own configuration; substitute variables first.
      for ( int i = 0; i < vsourceFilefolder.length && !parentJob.isStopped() && continueProcessing; i++ ) {
        // get real values
        String realSourceFileFolder = environmentSubstitute( vsourceFilefolder[i] );
        String realSourceWildcard = environmentSubstitute( vsourceWildcard[i] );
        String realSourceDelimiter = environmentSubstitute( vsourceDelimiter[i] );
        String realTargetDb = environmentSubstitute( targetDb[i] );
        String realTargetTable = environmentSubstitute( targetTable[i] );

        processOneRow(
          realSourceFileFolder, realSourceWildcard, realSourceDelimiter, realTargetDb, realTargetTable,
          parentJob, result );
      }
    }
  } catch ( Exception e ) {
    incrErrors();
    logError( BaseMessages.getString( PKG, "JobEntryMSAccessBulkLoad.UnexpectedError", e.getMessage() ) );
  }

  // Success Condition
  result.setNrErrors( NrErrors );
  result.setNrLinesInput( NrFilesToProcess );
  result.setNrLinesWritten( NrSuccess );
  if ( getSuccessStatus() ) {
    result.setResult( true );
  }

  displayResults();
  return result;
}
 
Example 8
Source File: JobEntryXSLT.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Applies an XSL transformation to XML files. Filenames (xml, xsl, output) come either from
 * the rows of the previous job entry (isFilenamesFromPrevious) or from this entry's own
 * configuration. Output properties and stylesheet parameters are prepared first.
 *
 * @param previousResult result of the previous job entry; re-used as this entry's result
 * @param nr             the job entry number (unused here)
 * @return previousResult with result/nrErrors/nrLinesWritten updated
 * @throws KettleException when a required parameter value is missing
 */
public Result execute( Result previousResult, int nr ) throws KettleException {
  Result result = previousResult;
  int NrErrors = 0;
  int NrSuccess = 0;

  // Check output parameters
  int nrOutputProps = getOutputPropertyName() == null ? 0 : getOutputPropertyName().length;
  if ( nrOutputProps > 0 ) {
    outputProperties = new Properties();
    for ( int i = 0; i < nrOutputProps; i++ ) {
      outputProperties.put( getOutputPropertyName()[i], environmentSubstitute( getOutputPropertyValue()[i] ) );
    }
    setOutputProperties = true;
  }

  // Check parameters
  nrParams = getParameterField() == null ? 0 : getParameterField().length;
  if ( nrParams > 0 ) {
    nameOfParams = new String[nrParams];
    valueOfParams = new String[nrParams];
    for ( int i = 0; i < nrParams; i++ ) {
      String name = environmentSubstitute( getParameterName()[i] );
      String value = environmentSubstitute( getParameterField()[i] );
      // A parameter without a value is a configuration error: abort the whole entry.
      if ( Utils.isEmpty( value ) ) {
        throw new KettleStepException( BaseMessages.getString( PKG, "Xslt.Exception.ParameterFieldMissing", name, i ) );
      }
      nameOfParams[i] = name;
      valueOfParams[i] = value;
    }
    useParameters = true;
  }

  List<RowMetaAndData> rows = result.getRows();
  if ( isFilenamesFromPrevious() ) {
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntryXSLT.Log.ArgFromPrevious.Found", ( rows != null ? rows
          .size() : 0 )
          + "" ) );
    }
  }

  if ( isFilenamesFromPrevious() && rows != null ) {
    // Copy the input row to the (command line) arguments
    RowMetaAndData resultRow = null;
    for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
      resultRow = rows.get( iteration );

      // Row layout: 0 = xml filename, 1 = xsl filename, 2 = output filename.
      String xmlfilename_previous = resultRow.getString( 0, null );
      String xslfilename_previous = resultRow.getString( 1, null );
      String ouputfilename_previous = resultRow.getString( 2, null );

      if ( !Utils.isEmpty( xmlfilename_previous ) && !Utils.isEmpty( xslfilename_previous )
          && !Utils.isEmpty( ouputfilename_previous ) ) {
        if ( processOneXMLFile( xmlfilename_previous, xslfilename_previous, ouputfilename_previous, result, parentJob ) ) {
          NrSuccess++;
        } else {
          NrErrors++;
        }
      } else {
        // We failed!
        logError( BaseMessages.getString( PKG, "JobEntryXSLT.AllFilesNotNull.Label" ) );
        NrErrors++;
      }

    }
  } else {
    String realxmlfilename = getRealxmlfilename();
    String realxslfilename = getRealxslfilename();
    // NOTE(review): unlike the xml/xsl filenames, the output filename is fetched via
    // getoutputfilename() rather than a getReal* getter — confirm that variable
    // substitution is applied inside that getter.
    String realoutputfilename = getoutputfilename();
    if ( !Utils.isEmpty( realxmlfilename ) && !Utils.isEmpty( realxslfilename )
        && !Utils.isEmpty( realoutputfilename ) ) {
      if ( processOneXMLFile( realxmlfilename, realxslfilename, realoutputfilename, result, parentJob ) ) {
        NrSuccess++;
      } else {
        NrErrors++;
      }
    } else {
      // We failed!
      logError( BaseMessages.getString( PKG, "JobEntryXSLT.AllFilesNotNull.Label" ) );
      NrErrors++;
    }
  }

  result.setResult( NrErrors == 0 );
  result.setNrErrors( NrErrors );
  result.setNrLinesWritten( NrSuccess );

  return result;
}
 
Example 9
Source File: JobEntryXMLWellFormed.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Checks whether the configured XML files are well-formed. Source/wildcard pairs come
 * either from the rows of the previous job entry (arg_from_previous) or from this entry's
 * own source_filefolder/wildcard arrays. Aborts early when the success condition is broken.
 *
 * @param previousResult result of the previous job entry; re-used as this entry's result
 * @param nr             the job entry number (unused here)
 * @return previousResult with nrErrors/nrLinesRejected/nrLinesWritten/result updated
 * @throws KettleException on unrecoverable processing errors
 */
public Result execute( Result previousResult, int nr ) throws KettleException {
  Result result = previousResult;
  // Pessimistic defaults; overwritten at the end when the success condition holds.
  result.setNrErrors( 1 );
  result.setResult( false );

  List<RowMetaAndData> rows = result.getRows();
  RowMetaAndData resultRow = null;

  // Reset per-run counters and the error limit.
  NrErrors = 0;
  NrWellFormed = 0;
  NrBadFormed = 0;
  limitFiles = Const.toInt( environmentSubstitute( getNrErrorsLessThan() ), 10 );
  successConditionBroken = false;
  successConditionBrokenExit = false;

  // Get source and destination files, also wildcard
  String[] vsourcefilefolder = source_filefolder;
  String[] vwildcard = wildcard;

  if ( arg_from_previous ) {
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobXMLWellFormed.Log.ArgFromPrevious.Found", ( rows != null ? rows
          .size() : 0 )
          + "" ) );
    }

  }
  if ( arg_from_previous && rows != null ) {
    // Copy the input row to the (command line) arguments
    for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
      if ( successConditionBroken ) {
        // Error limit reached: log once, publish counters and bail out early.
        if ( !successConditionBrokenExit ) {
          logError( BaseMessages.getString( PKG, "JobXMLWellFormed.Error.SuccessConditionbroken", "" + NrAllErrors ) );
          successConditionBrokenExit = true;
        }
        result.setEntryNr( NrAllErrors );
        result.setNrLinesRejected( NrBadFormed );
        result.setNrLinesWritten( NrWellFormed );
        return result;
      }

      resultRow = rows.get( iteration );

      // Row layout: 0 = source file/folder, 1 = wildcard.
      String vsourcefilefolder_previous = resultRow.getString( 0, null );
      String vwildcard_previous = resultRow.getString( 1, null );

      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobXMLWellFormed.Log.ProcessingRow", vsourcefilefolder_previous,
            vwildcard_previous ) );
      }

      processFileFolder( vsourcefilefolder_previous, vwildcard_previous, parentJob, result );
    }
  } else if ( vsourcefilefolder != null ) {
    // Sources come from this entry's own configuration arrays.
    for ( int i = 0; i < vsourcefilefolder.length && !parentJob.isStopped(); i++ ) {
      if ( successConditionBroken ) {
        if ( !successConditionBrokenExit ) {
          logError( BaseMessages.getString( PKG, "JobXMLWellFormed.Error.SuccessConditionbroken", "" + NrAllErrors ) );
          successConditionBrokenExit = true;
        }
        result.setEntryNr( NrAllErrors );
        result.setNrLinesRejected( NrBadFormed );
        result.setNrLinesWritten( NrWellFormed );
        return result;
      }

      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobXMLWellFormed.Log.ProcessingRow", vsourcefilefolder[i],
            vwildcard[i] ) );
      }

      processFileFolder( vsourcefilefolder[i], vwildcard[i], parentJob, result );

    }
  }

  // Success Condition
  result.setNrErrors( NrAllErrors );
  result.setNrLinesRejected( NrBadFormed );
  result.setNrLinesWritten( NrWellFormed );
  if ( getSuccessStatus() ) {
    // Success condition holds: clear the error flag set pessimistically above.
    result.setNrErrors( 0 );
    result.setResult( true );
  }

  displayResults();

  return result;
}
 
Example 10
Source File: JobEntryAddResultFilenames.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Adds filenames to the result's file list. File/mask pairs come either from the rows of
 * the previous job entry (argFromPrevious) or from this entry's own arguments/filemasks
 * arrays. Optionally clears the existing result files first (deleteallbefore).
 *
 * @param result the incoming/outgoing result whose file list is extended
 * @param nr     the job entry number (unused here)
 * @return the same result object, failure when at least one file could not be processed
 * @throws KettleException on unrecoverable processing errors
 */
public Result execute( Result result, int nr ) throws KettleException {
  List<RowMetaAndData> previousRows = result.getRows();

  int failedFiles = 0;
  result.setResult( true );

  if ( deleteallbefore ) {
    // Start from a clean slate: drop all previously collected result files.
    int size = result.getResultFiles().size();
    if ( log.isBasic() ) {
      logBasic( BaseMessages.getString( PKG, "JobEntryAddResultFilenames.log.FilesFound", "" + size ) );
    }

    result.getResultFiles().clear();
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntryAddResultFilenames.log.DeletedFiles", "" + size ) );
    }
  }

  if ( argFromPrevious && log.isDetailed() ) {
    logDetailed( BaseMessages.getString( PKG, "JobEntryAddResultFilenames.FoundPreviousRows", String
      .valueOf( ( previousRows != null ? previousRows.size() : 0 ) ) ) );
  }

  if ( argFromPrevious && previousRows != null ) {
    // File/mask pairs come from the rows of the previous job entry
    // (column 0 = file/folder, column 1 = file mask).
    for ( int rowNr = 0; rowNr < previousRows.size() && !parentJob.isStopped(); rowNr++ ) {
      RowMetaAndData currentRow = previousRows.get( rowNr );

      String fileFolder = currentRow.getString( 0, null );
      String fileMask = currentRow.getString( 1, null );

      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobEntryAddResultFilenames.ProcessingRow", fileFolder, fileMask ) );
      }

      if ( !processFile( fileFolder, fileMask, parentJob, result ) ) {
        failedFiles++;
      }
    }
  } else if ( arguments != null ) {
    // File/mask pairs come from this entry's own configuration.
    for ( int i = 0; i < arguments.length && !parentJob.isStopped(); i++ ) {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobEntryAddResultFilenames.ProcessingArg", arguments[i], filemasks[i] ) );
      }
      if ( !processFile( arguments[i], filemasks[i], parentJob, result ) ) {
        failedFiles++;
      }
    }
  }

  if ( failedFiles > 0 ) {
    result.setResult( false );
    result.setNrErrors( failedFiles );
  }

  return result;
}