Java Code Examples for org.pentaho.di.core.Result#setNrErrors()

The following examples show how to use org.pentaho.di.core.Result#setNrErrors(). All examples are taken from the pentaho-kettle project; the source file for each is listed above the code.
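Most of the examples below follow the same defensive pattern: the job entry marks the Result as failed up front, performs its work, and only clears the error count and sets the result flag once the work completes without an exception. The sketch below illustrates that pattern in isolation; the doWork() helper is a hypothetical placeholder for the entry's real logic, while Result#setNrErrors(), Result#setResult() and logError() are the actual Kettle calls used throughout the examples.

public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;

  // Assume failure until the work completes: one error and a false result flag.
  result.setNrErrors( 1 );
  result.setResult( false );

  try {
    doWork(); // hypothetical helper standing in for the job entry's real logic

    // The work succeeded: clear the error count and mark the entry as successful.
    result.setNrErrors( 0 );
    result.setResult( true );
  } catch ( Exception e ) {
    logError( "Job entry failed: " + e.getMessage() );
  }

  return result;
}
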
Example 1
Source File: JobEntryTelnet.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {

    Result result = previousResult;

    result.setNrErrors( 1 );
    result.setResult( false );

    String hostname = getRealHostname();
    int port = Const.toInt( getRealPort(), DEFAULT_PORT );
    int timeoutInt = Const.toInt( getRealTimeOut(), -1 );

    if ( Utils.isEmpty( hostname ) ) {
      // No Host was specified
      logError( BaseMessages.getString( PKG, "JobTelnet.SpecifyHost.Label" ) );
      return result;
    }

    try {

      SocketUtil.connectToHost( hostname, port, timeoutInt );

      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobTelnet.OK.Label", hostname, port ) );
      }

      result.setNrErrors( 0 );
      result.setResult( true );

    } catch ( Exception ex ) {
      logError( BaseMessages.getString( PKG, "JobTelnet.NOK.Label", hostname, String.valueOf( port ) ) );
      logError( BaseMessages.getString( PKG, "JobTelnet.Error.Label" ) + ex.getMessage() );
    }

    return result;
  }
 
Example 2
Source File: JobEntrySQL.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result result, int nr ) {

    if ( databaseMeta != null ) {
      try ( Database db = new Database( this, databaseMeta ) ) {
        String theSql = sqlFromFile ? buildSqlFromFile() : sql;
        if ( Utils.isEmpty( theSql ) ) {
          return result;
        }
        db.shareVariablesWith( this );
        db.connect( parentJob.getTransactionId(), null );
        // let it run
        if ( useVariableSubstitution ) {
          theSql = environmentSubstitute( theSql );
        }
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSQL.Log.SQlStatement", theSql ) );
        }
        if ( sendOneStatement ) {
          db.execStatement( theSql );
        } else {
          db.execStatements( theSql );
        }
      } catch ( KettleDatabaseException je ) {
        result.setNrErrors( 1 );
        logError( BaseMessages.getString( PKG, "JobSQL.ErrorRunJobEntry", je.getMessage() ) );
      }
    } else {
      result.setNrErrors( 1 );
      logError( BaseMessages.getString( PKG, "JobSQL.NoDatabaseConnection" ) );
    }

    result.setResult( result.getNrErrors() == 0 );
    return result;
  }
 
Example 3
Source File: MissingEntryTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void testExecute() throws KettleJobException {
  MissingEntry entry = spy( new MissingEntry() );
  when( entry.getLogChannel() ).thenReturn( mock( LogChannel.class ) );
  entry.setName( "MissingTest" );
  Result result = new Result();
  result.setNrErrors( 0 );
  result.setResult( true );
  entry.execute( result, 0 );
  assertEquals( 1, result.getNrErrors() );
  assertEquals( false, result.getResult() );
}
 
Example 4
Source File: PaloCubeCreate.java    From pentaho-kettle with Apache License 2.0
@Override
public Result execute( Result prevResult, int nr ) throws KettleException {

  Result result = new Result( nr );
  result.setResult( false );

  logDetailed( toString(), "Start of processing" );

  // String substitution..
  String realCubeName = environmentSubstitute( getCubeName() );

  PaloHelper database = new PaloHelper( this.getDatabaseMeta(), getLogLevel() );
  try {
    database.connect();
    database.createCube( realCubeName, dimensionNames.toArray( new String[dimensionNames.size()] ) );
    result.setResult( true );
    result.setNrLinesOutput( 1 );
  } catch ( Exception e ) {
    result.setNrErrors( 1 );
    e.printStackTrace();
    logError( toString(), "Error processing Palo Cube Create : " + e.getMessage() );
  } finally {
    database.disconnect();
  }

  return result;
}
 
Example 5
Source File: JobEntryCheckFilesLocked.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {

    Result result = previousResult;
    List<RowMetaAndData> rows = result.getRows();

    oneFileLocked = false;
    result.setResult( true );

    try {
      if ( argFromPrevious  && isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntryCheckFilesLocked.FoundPreviousRows", String
          .valueOf( ( rows != null ? rows.size() : 0 ) ) ) );
      }

      if ( argFromPrevious && rows != null ) {
        processFromPreviousArgument( rows );
      } else if ( arguments != null ) {

        for ( int i = 0; i < arguments.length && !parentJob.isStopped(); i++ ) {
          // ok we can process this file/folder
          if ( isDetailed() ) {
            logDetailed( BaseMessages.getString(
              PKG, "JobEntryCheckFilesLocked.ProcessingArg", arguments[i], filemasks[i] ) );
          }

          processFile( arguments[i], filemasks[i] );
        }
      }

      if ( oneFileLocked ) {
        result.setResult( false );
        result.setNrErrors( 1 );
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JobEntryCheckFilesLocked.ErrorRunningJobEntry", e ) );
    }

    return result;
  }
 
Example 6
Source File: JobEntryFileExists.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;
  result.setResult( false );
  result.setNrErrors( 0 );

  if ( filename != null ) {
    // Set Embedded NamedCluster Metastore Provider Key so that it can be passed to VFS
    if ( parentJobMeta.getNamedClusterEmbedManager() != null ) {
      parentJobMeta.getNamedClusterEmbedManager()
        .passEmbeddedMetastoreKey( this, parentJobMeta.getEmbeddedMetastoreProviderKey() );
    }

    String realFilename = getRealFilename();
    try {
      FileObject file = KettleVFS.getFileObject( realFilename, this );
      if ( file.exists() && file.isReadable() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntryFileExists.File_Exists", realFilename ) );
        result.setResult( true );
      } else {
        logDetailed( BaseMessages.getString( PKG, "JobEntryFileExists.File_Does_Not_Exist", realFilename ) );
      }
    } catch ( Exception e ) {
      result.setNrErrors( 1 );
      logError( BaseMessages.getString( PKG, "JobEntryFileExists.ERROR_0004_IO_Exception", e.getMessage() ), e );
    }
  } else {
    result.setNrErrors( 1 );
    logError( BaseMessages.getString( PKG, "JobEntryFileExists.ERROR_0005_No_Filename_Defined" ) );
  }

  return result;
}
 
Example 7
Source File: JobEntryDTDValidator.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;
  result.setResult( true );

  String realxmlfilename = getRealxmlfilename();
  String realDTDfilename = getRealDTDfilename();

  // Define a new DTD validator instance
  DTDValidator validator = new DTDValidator( log );
  // Set XML filename
  validator.setXMLFilename( realxmlfilename );
  if ( dtdintern ) {
    // The DTD is internal to the XML file
    validator.setInternDTD( true );
  } else {
    // The DTD is external
    // set the DTD filename
    validator.setDTDFilename( realDTDfilename );
  }
  // Validate the XML file and return the status
  boolean status = validator.validate();
  if ( !status ) {
    // The XML file is invalid!
    log.logError( validator.getErrorMessage() );
    result.setResult( false );
    result.setNrErrors( validator.getNrErrors() );
    result.setLogText( validator.getErrorMessage() );
  }

  return result;
}
 
Example 8
Source File: JobEntryTableExists.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;
  result.setResult( false );

  if ( connection != null ) {
    Database db = new Database( this, connection );
    db.shareVariablesWith( this );
    try {
      db.connect( parentJob.getTransactionId(), null );
      String realTablename = environmentSubstitute( tablename );
      String realSchemaname = environmentSubstitute( schemaname );

      if ( db.checkTableExists( realSchemaname, realTablename ) ) {
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "TableExists.Log.TableExists", realTablename ) );
        }
        result.setResult( true );
      } else {
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "TableExists.Log.TableNotExists", realTablename ) );
        }
      }
    } catch ( KettleDatabaseException dbe ) {
      result.setNrErrors( 1 );
      logError( BaseMessages.getString( PKG, "TableExists.Error.RunningJobEntry", dbe.getMessage() ) );
    } finally {
      if ( db != null ) {
        try {
          db.disconnect();
        } catch ( Exception e ) { /* Ignore */
        }
      }
    }
  } else {
    result.setNrErrors( 1 );
    logError( BaseMessages.getString( PKG, "TableExists.Error.NoConnectionDefined" ) );
  }

  return result;
}
 
Example 9
Source File: JobEntryDeleteFiles.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result result, int nr ) throws KettleException {
  List<RowMetaAndData> resultRows = result.getRows();

  int numberOfErrFiles = 0;
  result.setResult( false );
  result.setNrErrors( 1 );

  if ( argFromPrevious && log.isDetailed() ) {
    logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.FoundPreviousRows", String
      .valueOf( ( resultRows != null ? resultRows.size() : 0 ) ) ) );
  }

  // Set Embedded NamedCluster Metastore Provider Key so that it can be passed to VFS
  if ( parentJobMeta.getNamedClusterEmbedManager() != null ) {
    parentJobMeta.getNamedClusterEmbedManager()
      .passEmbeddedMetastoreKey( this, parentJobMeta.getEmbeddedMetastoreProviderKey() );
  }

  Multimap<String, String> pathToMaskMap = populateDataForJobExecution( resultRows );

  for ( Map.Entry<String, String> pathToMask : pathToMaskMap.entries() ) {
    final String filePath = environmentSubstitute( pathToMask.getKey() );
    if ( filePath.trim().isEmpty() ) {
      // Relative paths are permitted, and providing an empty path means deleting all files inside a root pdi-folder.
      // It is much more likely to be a mistake than a desirable action, so we don't delete anything (see PDI-15181)
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.NoPathProvided" ) );
      }
    } else {
      final String fileMask = environmentSubstitute( pathToMask.getValue() );

      if ( parentJob.isStopped() ) {
        break;
      }

      if ( !processFile( filePath, fileMask, parentJob ) ) {
        numberOfErrFiles++;
      }
    }
  }

  if ( numberOfErrFiles == 0 ) {
    result.setResult( true );
    result.setNrErrors( 0 );
  } else {
    result.setNrErrors( numberOfErrFiles );
    result.setResult( false );
  }

  return result;
}
 
Example 10
Source File: Trans.java    From pentaho-kettle with Apache License 2.0
/**
 * Gets the result of the transformation. The Result object contains such measures as the number of errors, number of
 * lines read/written/input/output/updated/rejected, etc.
 *
 * @return the Result object containing resulting measures from execution of the transformation
 */
public Result getResult() {
  if ( steps == null ) {
    return null;
  }

  Result result = new Result();
  result.setNrErrors( errors.longValue() );
  result.setResult( errors.longValue() == 0 );
  TransLogTable transLogTable = transMeta.getTransLogTable();

  for ( int i = 0; i < steps.size(); i++ ) {
    StepMetaDataCombi sid = steps.get( i );
    StepInterface step = sid.step;

    result.setNrErrors( result.getNrErrors() + sid.step.getErrors() );
    result.getResultFiles().putAll( step.getResultFiles() );

    if ( step.isSafeStopped() ) {
      result.setSafeStop( step.isSafeStopped() );
    }

    if ( step.getStepname().equals( transLogTable.getSubjectString( TransLogTable.ID.LINES_READ ) ) ) {
      result.setNrLinesRead( result.getNrLinesRead() + step.getLinesRead() );
    }
    if ( step.getStepname().equals( transLogTable.getSubjectString( TransLogTable.ID.LINES_INPUT ) ) ) {
      result.setNrLinesInput( result.getNrLinesInput() + step.getLinesInput() );
    }
    if ( step.getStepname().equals( transLogTable.getSubjectString( TransLogTable.ID.LINES_WRITTEN ) ) ) {
      result.setNrLinesWritten( result.getNrLinesWritten() + step.getLinesWritten() );
    }
    if ( step.getStepname().equals( transLogTable.getSubjectString( TransLogTable.ID.LINES_OUTPUT ) ) ) {
      result.setNrLinesOutput( result.getNrLinesOutput() + step.getLinesOutput() );
    }
    if ( step.getStepname().equals( transLogTable.getSubjectString( TransLogTable.ID.LINES_UPDATED ) ) ) {
      result.setNrLinesUpdated( result.getNrLinesUpdated() + step.getLinesUpdated() );
    }
    if ( step.getStepname().equals( transLogTable.getSubjectString( TransLogTable.ID.LINES_REJECTED ) ) ) {
      result.setNrLinesRejected( result.getNrLinesRejected() + step.getLinesRejected() );
    }
  }

  result.setRows( resultRows );
  if ( !Utils.isEmpty( resultFiles ) ) {
    result.setResultFiles( new HashMap<String, ResultFile>() );
    for ( ResultFile resultFile : resultFiles ) {
      result.getResultFiles().put( resultFile.toString(), resultFile );
    }
  }
  result.setStopped( isStopped() );
  result.setLogChannelId( log.getLogChannelId() );

  return result;
}
 
Example 11
Source File: JobEntryCreateFile.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) throws KettleException {
  Result result = previousResult;
  result.setResult( false );

  if ( filename != null ) {
    // Set Embedded NamedCluster Metastore Provider Key so that it can be passed to VFS
    if ( parentJobMeta.getNamedClusterEmbedManager() != null ) {
      parentJobMeta.getNamedClusterEmbedManager()
        .passEmbeddedMetastoreKey( this, parentJobMeta.getEmbeddedMetastoreProviderKey() );
    }

    String realFilename = getRealFilename();
    FileObject fileObject = null;
    try {
      fileObject = KettleVFS.getFileObject( realFilename, this );

      if ( fileObject.exists() ) {
        if ( isFailIfFileExists() ) {
          // File exists and fail flag is on.
          result.setResult( false );
          logError( "File [" + realFilename + "] exists, failing." );
        } else {
          // File already exists, no reason to try to create it
          result.setResult( true );
          logBasic( "File [" + realFilename + "] already exists, not recreating." );
        }
        // add filename to result filenames if needed
        if ( isAddFilenameToResult() ) {
          addFilenameToResult( realFilename, result, parentJob );
        }
      } else {
        // No file yet, create an empty file.
        fileObject.createFile();
        logBasic( "File [" + realFilename + "] created!" );
        // add filename to result filenames if needed
        if ( isAddFilenameToResult() ) {
          addFilenameToResult( realFilename, result, parentJob );
        }
        result.setResult( true );
      }
    } catch ( IOException e ) {
      logError( "Could not create file [" + realFilename + "], exception: " + e.getMessage() );
      result.setResult( false );
      result.setNrErrors( 1 );
    } finally {
      if ( fileObject != null ) {
        try {
          fileObject.close();
          fileObject = null;
        } catch ( IOException ex ) {
          // Ignore
        }
      }
    }
  } else {
    logError( "No filename is defined." );
  }

  return result;
}
 
Example 12
Source File: JobEntryColumnsExist.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;
  result.setResult( false );
  result.setNrErrors( 1 );

  int nrexistcolums = 0;
  int nrnotexistcolums = 0;

  if ( Utils.isEmpty( tablename ) ) {
    logError( BaseMessages.getString( PKG, "JobEntryColumnsExist.Error.TablenameEmpty" ) );
    return result;
  }
  if ( arguments == null ) {
    logError( BaseMessages.getString( PKG, "JobEntryColumnsExist.Error.ColumnameEmpty" ) );
    return result;
  }
  if ( connection != null ) {
    Database db = getNewDatabaseFromMeta();
    db.shareVariablesWith( this );
    try {
      String realSchemaname = environmentSubstitute( schemaname );
      String realTablename = environmentSubstitute( tablename );

      db.connect( parentJob.getTransactionId(), null );

      if ( db.checkTableExists( realSchemaname, realTablename ) ) {
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobEntryColumnsExist.Log.TableExists", realTablename ) );
        }

        for ( int i = 0; i < arguments.length && !parentJob.isStopped(); i++ ) {
          String realColumnname = environmentSubstitute( arguments[i] );

          if ( db.checkColumnExists( realSchemaname, realTablename, realColumnname ) ) {
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString(
                PKG, "JobEntryColumnsExist.Log.ColumnExists", realColumnname, realTablename ) );
            }
            nrexistcolums++;
          } else {
            logError( BaseMessages.getString(
              PKG, "JobEntryColumnsExist.Log.ColumnNotExists", realColumnname, realTablename ) );
            nrnotexistcolums++;
          }
        }
      } else {
        logError( BaseMessages.getString( PKG, "JobEntryColumnsExist.Log.TableNotExists", realTablename ) );
      }
    } catch ( KettleDatabaseException dbe ) {
      logError( BaseMessages.getString( PKG, "JobEntryColumnsExist.Error.UnexpectedError", dbe.getMessage() ) );
    } finally {
      if ( db != null ) {
        try {
          db.disconnect();
        } catch ( Exception e ) { /* Ignore */
        }
      }
    }
  } else {
    logError( BaseMessages.getString( PKG, "JobEntryColumnsExist.Error.NoDbConnection" ) );
  }

  result.setEntryNr( nrnotexistcolums );
  result.setNrLinesWritten( nrexistcolums );
  // result is true only if all columns found (PDI-15801)
  if (  nrexistcolums == arguments.length ) {
    result.setNrErrors( 0 );
    result.setResult( true );
  }
  return result;
}
 
Example 13
Source File: JobEntryFilesExist.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;
  result.setResult( false );
  result.setNrErrors( 0 );
  int missingfiles = 0;
  int nrErrors = 0;

  // see PDI-10270 for details
  boolean oldBehavior =
    "Y".equalsIgnoreCase( getVariable( Const.KETTLE_COMPATIBILITY_SET_ERROR_ON_SPECIFIC_JOB_ENTRIES, "N" ) );

  if ( arguments != null ) {
    for ( int i = 0; i < arguments.length && !parentJob.isStopped(); i++ ) {
      FileObject file = null;

      try {
        String realFilefoldername = environmentSubstitute( arguments[i] );
        // Set Embedded NamedCluster Metastore Provider Key so that it can be passed to VFS
        if ( parentJobMeta.getNamedClusterEmbedManager() != null ) {
          parentJobMeta.getNamedClusterEmbedManager()
            .passEmbeddedMetastoreKey( this, parentJobMeta.getEmbeddedMetastoreProviderKey() );
        }
        file = KettleVFS.getFileObject( realFilefoldername, this );

        if ( file.exists() && file.isReadable() ) { // TODO: is it needed to check file for readability?
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobEntryFilesExist.File_Exists", realFilefoldername ) );
          }
        } else {
          missingfiles++;
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString(
              PKG, "JobEntryFilesExist.File_Does_Not_Exist", realFilefoldername ) );
          }
        }

      } catch ( Exception e ) {
        nrErrors++;
        missingfiles++;
        logError( BaseMessages.getString( PKG, "JobEntryFilesExist.ERROR_0004_IO_Exception", e.toString() ), e );
      } finally {
        if ( file != null ) {
          try {
            file.close();
            file = null;
          } catch ( IOException ex ) { /* Ignore */
          }
        }
      }
    }

  }

  result.setNrErrors( nrErrors );

  if ( oldBehavior ) {
    result.setNrErrors( missingfiles );
  }

  if ( missingfiles == 0 ) {
    result.setResult( true );
  }

  return result;
}
 
Example 14
Source File: JobEntryCreateFolder.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;
  result.setResult( false );

  if ( foldername != null ) {
    // Set Embedded NamedCluster Metastore Provider Key so that it can be passed to VFS
    if ( parentJobMeta.getNamedClusterEmbedManager() != null ) {
      parentJobMeta.getNamedClusterEmbedManager()
        .passEmbeddedMetastoreKey( this, parentJobMeta.getEmbeddedMetastoreProviderKey() );
    }

    String realFoldername = getRealFoldername();
    FileObject folderObject = null;
    try {
      folderObject = KettleVFS.getFileObject( realFoldername, this );

      if ( folderObject.exists() ) {
        boolean isFolder = false;

        // Check if it's a folder
        if ( folderObject.getType() == FileType.FOLDER ) {
          isFolder = true;
        }

        if ( isFailOfFolderExists() ) {
          // Folder exists and fail flag is on.
          result.setResult( false );
          if ( isFolder ) {
            logError( "Folder [" + realFoldername + "] exists, failing." );
          } else {
            logError( "File [" + realFoldername + "] exists, failing." );
          }
        } else {
          // Folder already exists, no reason to try to create it
          result.setResult( true );
          if ( log.isDetailed() ) {
            logDetailed( "Folder [" + realFoldername + "] already exists, not recreating." );
          }
        }

      } else {
        // No Folder yet, create an empty Folder.
        folderObject.createFolder();
        if ( log.isDetailed() ) {
          logDetailed( "Folder [" + realFoldername + "] created!" );
        }
        result.setResult( true );
      }
    } catch ( Exception e ) {
      logError( "Could not create Folder [" + realFoldername + "]", e );
      result.setResult( false );
      result.setNrErrors( 1 );
    } finally {
      if ( folderObject != null ) {
        try {
          folderObject.close();
        } catch ( IOException ex ) { /* Ignore */
        }
      }
    }
  } else {
    logError( "No Foldername is defined." );
  }

  return result;
}
 
Example 15
Source File: JobEntryPing.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;

  result.setNrErrors( 1 );
  result.setResult( false );

  String hostname = getRealHostname();
  int timeoutInt = Const.toInt( getRealTimeOut(), 300 );
  int packets = Const.toInt( getRealNbrPackets(), 2 );
  boolean status = false;

  if ( Utils.isEmpty( hostname ) ) {
    // No Host was specified
    logError( BaseMessages.getString( PKG, "JobPing.SpecifyHost.Label" ) );
    return result;
  }

  try {
    if ( ipingtype == isystemPing || ipingtype == ibothPings ) {
      // Perform a system (Java) ping ...
      status = systemPing( hostname, timeoutInt );
      if ( status ) {
        if ( log.isDetailed() ) {
          log.logDetailed( BaseMessages.getString( PKG, "JobPing.SystemPing" ), BaseMessages.getString(
            PKG, "JobPing.OK.Label", hostname ) );
        }
      } else {
        log.logError( BaseMessages.getString( PKG, "JobPing.SystemPing" ), BaseMessages.getString(
          PKG, "JobPing.NOK.Label", hostname ) );
      }
    }
    if ( ( ipingtype == iclassicPing ) || ( ipingtype == ibothPings && !status ) ) {
      // Perform a classic ping ..
      status = classicPing( hostname, packets );
      if ( status ) {
        if ( log.isDetailed() ) {
          log.logDetailed( BaseMessages.getString( PKG, "JobPing.ClassicPing" ), BaseMessages.getString(
            PKG, "JobPing.OK.Label", hostname ) );
        }
      } else {
        log.logError( BaseMessages.getString( PKG, "JobPing.ClassicPing" ), BaseMessages.getString(
          PKG, "JobPing.NOK.Label", hostname ) );
      }
    }
  } catch ( Exception ex ) {
    logError( BaseMessages.getString( PKG, "JobPing.Error.Label" ) + ex.getMessage() );
  }
  if ( status ) {
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobPing.OK.Label", hostname ) );
    }
    result.setNrErrors( 0 );
    result.setResult( true );
  } else {
    logError( BaseMessages.getString( PKG, "JobPing.NOK.Label", hostname ) );
  }
  return result;
}
 
Example 16
Source File: TransWebSocketEngineAdapter.java    From pentaho-kettle with Apache License 2.0
@Override
public Result getResult() {
  Result toRet = new Result();
  toRet.setNrErrors( getErrors() );
  return toRet;
}
 
Example 17
Source File: JobEntryDosToUnix.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) throws KettleException {
  Result result = previousResult;
  result.setNrErrors( 1 );
  result.setResult( false );

  List<RowMetaAndData> rows = previousResult.getRows();
  RowMetaAndData resultRow = null;

  nrErrors = 0;
  nrProcessedFiles = 0;
  nrErrorFiles = 0;
  limitFiles = Const.toInt( environmentSubstitute( getNrErrorsLessThan() ), 10 );
  successConditionBroken = false;
  successConditionBrokenExit = false;
  tempFolder = environmentSubstitute( "%%java.io.tmpdir%%" );

  // Get source and destination files, also wildcard
  String[] vsourcefilefolder = source_filefolder;
  String[] vwildcard = wildcard;

  if ( arg_from_previous ) {
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobDosToUnix.Log.ArgFromPrevious.Found", ( rows != null ? rows
        .size() : 0 )
        + "" ) );
    }

  }
  if ( arg_from_previous && rows != null ) {
    // Copy the input row to the (command line) arguments
    for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
      if ( successConditionBroken ) {
        if ( !successConditionBrokenExit ) {
          logError( BaseMessages.getString( PKG, "JobDosToUnix.Error.SuccessConditionbroken", "" + nrAllErrors ) );
          successConditionBrokenExit = true;
        }
        result.setEntryNr( nrAllErrors );
        result.setNrLinesRejected( nrErrorFiles );
        result.setNrLinesWritten( nrProcessedFiles );
        return result;
      }

      resultRow = rows.get( iteration );

      // Get source and destination file names, also wildcard
      String vsourcefilefolder_previous = resultRow.getString( 0, null );
      String vwildcard_previous = resultRow.getString( 1, null );
      int convertion_type = JobEntryDosToUnix.getConversionTypeByCode( resultRow.getString( 2, null ) );

      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobDosToUnix.Log.ProcessingRow", vsourcefilefolder_previous, vwildcard_previous ) );
      }

      processFileFolder( vsourcefilefolder_previous, vwildcard_previous, convertion_type, parentJob, result );
    }
  } else if ( vsourcefilefolder != null ) {
    for ( int i = 0; i < vsourcefilefolder.length && !parentJob.isStopped(); i++ ) {
      if ( successConditionBroken ) {
        if ( !successConditionBrokenExit ) {
          logError( BaseMessages.getString( PKG, "JobDosToUnix.Error.SuccessConditionbroken", "" + nrAllErrors ) );
          successConditionBrokenExit = true;
        }
        result.setEntryNr( nrAllErrors );
        result.setNrLinesRejected( nrErrorFiles );
        result.setNrLinesWritten( nrProcessedFiles );
        return result;
      }

      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobDosToUnix.Log.ProcessingRow", vsourcefilefolder[i], vwildcard[i] ) );
      }

      processFileFolder( vsourcefilefolder[i], vwildcard[i], conversionTypes[i], parentJob, result );

    }
  }

  // Success Condition
  result.setNrErrors( nrAllErrors );
  result.setNrLinesRejected( nrErrorFiles );
  result.setNrLinesWritten( nrProcessedFiles );
  if ( getSuccessStatus() ) {
    result.setNrErrors( 0 );
    result.setResult( true );
  }

  displayResults();

  return result;
}
 
Example 18
Source File: JobEntrySendNagiosPassiveCheck.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {
  log.logBasic( BaseMessages.getString( PKG, "JobEntrySendNagiosPassiveCheck.Started", serverName ) );

  Result result = previousResult;
  result.setNrErrors( 1 );
  result.setResult( false );

  // Target
  String realServername = environmentSubstitute( serverName );
  String realPassword = Utils.resolvePassword( variables, password );
  int realPort = Const.toInt( environmentSubstitute( port ), DEFAULT_PORT );
  int realResponseTimeOut = Const.toInt( environmentSubstitute( responseTimeOut ), DEFAULT_RESPONSE_TIME_OUT );
  int realConnectionTimeOut =
    Const.toInt( environmentSubstitute( connectionTimeOut ), DEFAULT_CONNECTION_TIME_OUT );

  // Sender
  String realSenderServerName = environmentSubstitute( senderServerName );
  String realSenderServiceName = environmentSubstitute( senderServiceName );

  try {
    if ( Utils.isEmpty( realServername ) ) {
      throw new KettleException( BaseMessages.getString(
        PKG, "JobSendNagiosPassiveCheck.Error.TargetServerMissing" ) );
    }

    String realMessageString = environmentSubstitute( message );

    if ( Utils.isEmpty( realMessageString ) ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobSendNagiosPassiveCheck.Error.MessageMissing" ) );
    }

    Level level = Level.UNKNOWN;
    switch ( getLevel() ) {
      case LEVEL_TYPE_OK:
        level = Level.OK;
        break;
      case LEVEL_TYPE_CRITICAL:
        level = Level.CRITICAL;
        break;
      case LEVEL_TYPE_WARNING:
        level = Level.WARNING;
        break;
      default:
        break;
    }
    Encryption encr = Encryption.NONE;
    switch ( getEncryptionMode() ) {
      case ENCRYPTION_MODE_TRIPLEDES:
        encr = Encryption.TRIPLE_DES;
        break;
      case ENCRYPTION_MODE_XOR:
        encr = Encryption.XOR;
        break;
      default:
        break;
    }

    // settings
    NagiosSettingsBuilder ns = new NagiosSettingsBuilder();
    ns.withNagiosHost( realServername );
    ns.withPort( realPort );
    ns.withConnectionTimeout( realConnectionTimeOut );
    ns.withResponseTimeout( realResponseTimeOut );
    ns.withEncryption( encr );
    if ( !Utils.isEmpty( realPassword ) ) {
      ns.withPassword( realPassword );
    } else {
      ns.withNoPassword();
    }

    // target nagios host
    NagiosSettings settings = ns.create();

    // sender
    MessagePayloadBuilder pb = new MessagePayloadBuilder();
    if ( !Utils.isEmpty( realSenderServerName ) ) {
      pb.withHostname( realSenderServerName );
    }
    pb.withLevel( level );
    if ( !Utils.isEmpty( realSenderServiceName ) ) {
      pb.withServiceName( realSenderServiceName );
    }
    pb.withMessage( realMessageString );
    MessagePayload payload = pb.create();

    NagiosPassiveCheckSender sender = new NagiosPassiveCheckSender( settings );

    sender.send( payload );

    result.setNrErrors( 0 );
    result.setResult( true );

  } catch ( Exception e ) {
    log.logError( BaseMessages.getString( PKG, "JobEntrySendNagiosPassiveCheck.ErrorGetting", e.toString() ) );
  }

  return result;
}
 
Example 19
Source File: JobEntryMSAccessBulkLoad.java    From pentaho-kettle with Apache License 2.0
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;

  List<RowMetaAndData> rows = result.getRows();
  RowMetaAndData resultRow = null;
  result.setResult( false );

  NrErrors = 0;
  NrSuccess = 0;
  NrFilesToProcess = 0;
  continueProcessing = true;
  limitFiles = Const.toInt( environmentSubstitute( getLimit() ), 10 );

  // Get source and destination files, also wildcard
  String[] vsourceFilefolder = source_filefolder;
  String[] vsourceWildcard = source_wildcard;
  String[] vsourceDelimiter = delimiter;
  String[] targetDb = target_Db;
  String[] targetTable = target_table;

  try {

    if ( is_args_from_previous ) {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobEntryMSAccessBulkLoad.Log.ArgFromPrevious.Found", ( rows != null ? rows.size() : 0 ) + "" ) );
      }
    }
    if ( is_args_from_previous && rows != null ) {
      for ( int iteration = 0; iteration < rows.size()
        && !parentJob.isStopped()
        && continueProcessing; iteration++ ) {
        resultRow = rows.get( iteration );

        // Get source and destination file names, also wildcard
        String vSourceFileFolder_previous = resultRow.getString( 0, null );
        String vSourceWildcard_previous = resultRow.getString( 1, null );
        String vDelimiter_previous = resultRow.getString( 2, null );
        String vTargetDb_previous = resultRow.getString( 3, null );
        String vTargetTable_previous = resultRow.getString( 4, null );

        processOneRow(
          vSourceFileFolder_previous, vSourceWildcard_previous, vDelimiter_previous, vTargetDb_previous,
          vTargetTable_previous, parentJob, result );

      }
    } else if ( vsourceFilefolder != null && targetDb != null && targetTable != null ) {
      for ( int i = 0; i < vsourceFilefolder.length && !parentJob.isStopped() && continueProcessing; i++ ) {
        // get real values
        String realSourceFileFolder = environmentSubstitute( vsourceFilefolder[i] );
        String realSourceWildcard = environmentSubstitute( vsourceWildcard[i] );
        String realSourceDelimiter = environmentSubstitute( vsourceDelimiter[i] );
        String realTargetDb = environmentSubstitute( targetDb[i] );
        String realTargetTable = environmentSubstitute( targetTable[i] );

        processOneRow(
          realSourceFileFolder, realSourceWildcard, realSourceDelimiter, realTargetDb, realTargetTable,
          parentJob, result );
      }
    }
  } catch ( Exception e ) {
    incrErrors();
    logError( BaseMessages.getString( PKG, "JobEntryMSAccessBulkLoad.UnexpectedError", e.getMessage() ) );
  }

  // Success Condition
  result.setNrErrors( NrErrors );
  result.setNrLinesInput( NrFilesToProcess );
  result.setNrLinesWritten( NrSuccess );
  if ( getSuccessStatus() ) {
    result.setResult( true );
  }

  displayResults();
  return result;
}
 
Example 20
Source File: JobEntrySuccess.java    From pentaho-kettle with Apache License 2.0
/**
 * Execute this job entry and return the result. In this case that simply means setting the result boolean in the
 * Result class.
 *
 * @param previousResult
 *          The result of the previous execution
 * @return The Result of the execution.
 */
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;
  result.setNrErrors( 0 );
  result.setResult( true );

  return result;
}