Java Code Examples for org.pentaho.di.core.Const#toDouble()

The following examples show how to use org.pentaho.di.core.Const#toDouble(). Each example is taken from an open-source project; the source file, project, and license are noted above the snippet.
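
Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: Const.toDouble( String, double ) parses a string into a double and returns the supplied default when the input cannot be parsed, which is the behavior the examples below rely on when they pass sentinel defaults such as -1.0 or 0.0. The class name ConstToDoubleSketch is made up for illustration and is not part of any of the projects below.

import org.pentaho.di.core.Const;

public class ConstToDoubleSketch {
  public static void main( String[] args ) {
    // A well-formed numeric string is parsed as usual.
    double loadAvg = Const.toDouble( "0.75", -1.0 );           // 0.75

    // A null or malformed string yields the default instead of an exception.
    double missing = Const.toDouble( null, -1.0 );             // -1.0
    double garbage = Const.toDouble( "not a number", -1.0 );   // -1.0

    System.out.println( loadAvg + " / " + missing + " / " + garbage );
  }
}
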
Example 1
Source File: ServerStatus.java    From pentaho-kettle with Apache License 2.0
public ServerStatus( Node statusNode ) throws KettleException {
  this();
  statusDescription = XMLHandler.getTagValue( statusNode, "statusdesc" );

  memoryFree = Const.toLong( XMLHandler.getTagValue( statusNode, "memory_free" ), -1L );
  memoryTotal = Const.toLong( XMLHandler.getTagValue( statusNode, "memory_total" ), -1L );

  String cpuCoresStr = XMLHandler.getTagValue( statusNode, "cpu_cores" );
  cpuCores = Const.toInt( cpuCoresStr, -1 );
  String cpuProcessTimeStr = XMLHandler.getTagValue( statusNode, "cpu_process_time" );
  cpuProcessTime = Utils.isEmpty( cpuProcessTimeStr ) ? 0L : Long.valueOf( cpuProcessTimeStr );

  uptime = Const.toLong( XMLHandler.getTagValue( statusNode, "uptime" ), -1 );
  threadCount = Const.toInt( XMLHandler.getTagValue( statusNode, "thread_count" ), -1 );
  loadAvg = Const.toDouble( XMLHandler.getTagValue( statusNode, "load_avg" ), -1.0 );

  osName = XMLHandler.getTagValue( statusNode, "os_name" );
  osVersion = XMLHandler.getTagValue( statusNode, "os_version" );
  osArchitecture = XMLHandler.getTagValue( statusNode, "os_arch" );
}
 
Example 2
Source File: SlaveServerStatus.java    From pentaho-kettle with Apache License 2.0
public SlaveServerStatus( Node statusNode ) throws KettleException {
  this();
  statusDescription = XMLHandler.getTagValue( statusNode, "statusdesc" );

  memoryFree = Const.toLong( XMLHandler.getTagValue( statusNode, "memory_free" ), -1L );
  memoryTotal = Const.toLong( XMLHandler.getTagValue( statusNode, "memory_total" ), -1L );

  String cpuCoresStr = XMLHandler.getTagValue( statusNode, "cpu_cores" );
  cpuCores = Const.toInt( cpuCoresStr, -1 );
  String cpuProcessTimeStr = XMLHandler.getTagValue( statusNode, "cpu_process_time" );
  cpuProcessTime = Utils.isEmpty( cpuProcessTimeStr ) ? 0L : Long.valueOf( cpuProcessTimeStr );

  uptime = Const.toLong( XMLHandler.getTagValue( statusNode, "uptime" ), -1 );
  threadCount = Const.toInt( XMLHandler.getTagValue( statusNode, "thread_count" ), -1 );
  loadAvg = Const.toDouble( XMLHandler.getTagValue( statusNode, "load_avg" ), -1.0 );

  osName = XMLHandler.getTagValue( statusNode, "os_name" );
  osVersion = XMLHandler.getTagValue( statusNode, "os_version" );
  osArchitecture = XMLHandler.getTagValue( statusNode, "os_arch" );

  Node listTransNode = XMLHandler.getSubNode( statusNode, "transstatuslist" );
  Node listJobsNode = XMLHandler.getSubNode( statusNode, "jobstatuslist" );

  int nrTrans = XMLHandler.countNodes( listTransNode, SlaveServerTransStatus.XML_TAG );
  int nrJobs = XMLHandler.countNodes( listJobsNode, SlaveServerJobStatus.XML_TAG );

  for ( int i = 0; i < nrTrans; i++ ) {
    Node transStatusNode = XMLHandler.getSubNodeByNr( listTransNode, SlaveServerTransStatus.XML_TAG, i );
    transStatusList.add( new SlaveServerTransStatus( transStatusNode ) );
  }

  for ( int i = 0; i < nrJobs; i++ ) {
    Node jobStatusNode = XMLHandler.getSubNodeByNr( listJobsNode, SlaveServerJobStatus.XML_TAG, i );
    jobStatusList.add( new SlaveServerJobStatus( jobStatusNode ) );
  }
}
 
Example 3
Source File: KettleBeamPipelineExecutor.java    From kettle-beam with Apache License 2.0
private void configureSparkOptions( BeamJobConfig config, SparkPipelineOptions options, VariableSpace space, String transformationName ) throws IOException {

    // options.setFilesToStage( BeamConst.findLibraryFilesToStage( null, config.getPluginsToStage(), true, true ) );

    if ( StringUtils.isNotEmpty( config.getSparkMaster() ) ) {
      options.setSparkMaster( space.environmentSubstitute( config.getSparkMaster() ) );
    }
    if ( StringUtils.isNotEmpty( config.getSparkBatchIntervalMillis() ) ) {
      long interval = Const.toLong( space.environmentSubstitute( config.getSparkBatchIntervalMillis() ), -1L );
      if ( interval >= 0 ) {
        options.setBatchIntervalMillis( interval );
      }
    }
    if ( StringUtils.isNotEmpty( config.getSparkCheckpointDir() ) ) {
      options.setCheckpointDir( space.environmentSubstitute( config.getSparkCheckpointDir() ) );
    }
    if ( StringUtils.isNotEmpty( config.getSparkCheckpointDurationMillis() ) ) {
      long duration = Const.toLong( space.environmentSubstitute( config.getSparkCheckpointDurationMillis() ), -1L );
      if ( duration >= 0 ) {
        options.setCheckpointDurationMillis( duration );
      }
    }
    if ( StringUtils.isNotEmpty( config.getSparkMaxRecordsPerBatch() ) ) {
      long records = Const.toLong( space.environmentSubstitute( config.getSparkMaxRecordsPerBatch() ), -1L );
      if ( records >= 0 ) {
        options.setMaxRecordsPerBatch( records );
      }
    }
    if ( StringUtils.isNotEmpty( config.getSparkMinReadTimeMillis() ) ) {
      long readTime = Const.toLong( space.environmentSubstitute( config.getSparkMinReadTimeMillis() ), -1L );
      if ( readTime >= 0 ) {
        options.setMinReadTimeMillis( readTime );
      }
    }
    if ( StringUtils.isNotEmpty( config.getSparkReadTimePercentage() ) ) {
      double percentage = Const.toDouble( space.environmentSubstitute( config.getSparkReadTimePercentage() ), -1.0 );
      if ( percentage >= 0 ) {
        options.setReadTimePercentage( percentage / 100 );
      }
    }
    if ( StringUtils.isNotEmpty( config.getSparkBundleSize() ) ) {
      long bundleSize = Const.toLong( space.environmentSubstitute( config.getSparkBundleSize() ), -1L );
      if ( bundleSize >= 0 ) {
        options.setBundleSize( bundleSize );
      }
    }
    if ( StringUtils.isNotEmpty( config.getSparkStorageLevel() ) ) {
      options.setStorageLevel( space.environmentSubstitute( config.getSparkStorageLevel() ) );
    }
    String appName = transformationName.replace( " ", "_" );
    options.setAppName( appName );
}
 
Example 4
Source File: FuzzyMatch.java    From pentaho-kettle with Apache License 2.0
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (FuzzyMatchMeta) smi;
  data = (FuzzyMatchData) sdi;

  if ( super.init( smi, sdi ) ) {

    // Check lookup and main stream field
    if ( Utils.isEmpty( meta.getMainStreamField() ) ) {
      logError( BaseMessages.getString( PKG, "FuzzyMatch.Error.MainStreamFieldMissing" ) );
      return false;
    }
    if ( Utils.isEmpty( meta.getLookupField() ) ) {
      logError( BaseMessages.getString( PKG, "FuzzyMatch.Error.LookupStreamFieldMissing" ) );
      return false;
    }

    // Checks output fields
    String matchField = environmentSubstitute( meta.getOutputMatchField() );
    if ( Utils.isEmpty( matchField ) ) {
      logError( BaseMessages.getString( PKG, "FuzzyMatch.Error.OutputMatchFieldMissing" ) );
      return false;
    }

    // We need to add metrics (distance, similarity, ...)
    // only when the field name is provided
    // and the user wants to return the closer value
    data.addValueFieldName =
      ( !Utils.isEmpty( environmentSubstitute( meta.getOutputValueField() ) ) && meta.isGetCloserValue() );

    // Set the number of fields to cache
    // default value is one
    int nrFields = 1;

    if ( meta.getValue() != null && meta.getValue().length > 0 ) {

      if ( meta.isGetCloserValue()
        || ( meta.getAlgorithmType() == FuzzyMatchMeta.OPERATION_TYPE_DOUBLE_METAPHONE )
        || ( meta.getAlgorithmType() == FuzzyMatchMeta.OPERATION_TYPE_SOUNDEX )
        || ( meta.getAlgorithmType() == FuzzyMatchMeta.OPERATION_TYPE_REFINED_SOUNDEX )
        || ( meta.getAlgorithmType() == FuzzyMatchMeta.OPERATION_TYPE_METAPHONE ) ) {
        // cache also additional fields
        data.addAdditionalFields = true;
        nrFields += meta.getValue().length;
      }
    }
    data.indexOfCachedFields = new int[nrFields];

    switch ( meta.getAlgorithmType() ) {
      case FuzzyMatchMeta.OPERATION_TYPE_LEVENSHTEIN:
      case FuzzyMatchMeta.OPERATION_TYPE_DAMERAU_LEVENSHTEIN:
      case FuzzyMatchMeta.OPERATION_TYPE_NEEDLEMAN_WUNSH:
        data.minimalDistance = Const.toInt( environmentSubstitute( meta.getMinimalValue() ), 0 );
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "FuzzyMatch.Log.MinimalDistance", data.minimalDistance ) );
        }
        data.maximalDistance = Const.toInt( environmentSubstitute( meta.getMaximalValue() ), 5 );
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "FuzzyMatch.Log.MaximalDistance", data.maximalDistance ) );
        }
        if ( !meta.isGetCloserValue() ) {
          data.valueSeparator = environmentSubstitute( meta.getSeparator() );
          if ( isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "FuzzyMatch.Log.Separator", data.valueSeparator ) );
          }
        }
        break;
      case FuzzyMatchMeta.OPERATION_TYPE_JARO:
      case FuzzyMatchMeta.OPERATION_TYPE_JARO_WINKLER:
      case FuzzyMatchMeta.OPERATION_TYPE_PAIR_SIMILARITY:
        data.minimalSimilarity = Const.toDouble( environmentSubstitute( meta.getMinimalValue() ), 0 );
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "FuzzyMatch.Log.MinimalSimilarity", data.minimalSimilarity ) );
        }
        data.maximalSimilarity = Const.toDouble( environmentSubstitute( meta.getMaximalValue() ), 1 );
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "FuzzyMatch.Log.MaximalSimilarity", data.maximalSimilarity ) );
        }
        if ( !meta.isGetCloserValue() ) {
          data.valueSeparator = environmentSubstitute( meta.getSeparator() );
          if ( isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "FuzzyMatch.Log.Separator", data.valueSeparator ) );
          }
        }
        break;
      default:
        break;
    }

    data.readLookupValues = true;

    return true;
  }
  return false;
}
 
Example 5
Source File: FuzzyMatchDialog.java    From pentaho-kettle with Apache License 2.0
private void activeAlgorithm() {
  boolean enable =
    ( FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
        == FuzzyMatchMeta.OPERATION_TYPE_LEVENSHTEIN
      || FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
        == FuzzyMatchMeta.OPERATION_TYPE_NEEDLEMAN_WUNSH
      || FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
        == FuzzyMatchMeta.OPERATION_TYPE_DAMERAU_LEVENSHTEIN
      || FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
        == FuzzyMatchMeta.OPERATION_TYPE_JARO
      || FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
        == FuzzyMatchMeta.OPERATION_TYPE_JARO_WINKLER
      || FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
        == FuzzyMatchMeta.OPERATION_TYPE_PAIR_SIMILARITY );

  wlgetCloserValue.setEnabled( enable );
  wgetCloserValue.setEnabled( enable );
  wlminValue.setEnabled( enable );
  wminValue.setEnabled( enable );
  wlmaxValue.setEnabled( enable );
  wmaxValue.setEnabled( enable );

  if ( FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
      == FuzzyMatchMeta.OPERATION_TYPE_JARO
    || FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
      == FuzzyMatchMeta.OPERATION_TYPE_JARO_WINKLER
    || FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
      == FuzzyMatchMeta.OPERATION_TYPE_PAIR_SIMILARITY ) {
    if ( Const.toDouble( transMeta.environmentSubstitute( wminValue.getText() ), 0 ) > 1 ) {
      wminValue.setText( String.valueOf( 1 ) );
    }
    if ( Const.toDouble( transMeta.environmentSubstitute( wmaxValue.getText() ), 0 ) > 1 ) {
      wmaxValue.setText( String.valueOf( 1 ) );
    }
  }

  boolean enableCaseSensitive =
    ( FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
        == FuzzyMatchMeta.OPERATION_TYPE_LEVENSHTEIN
      || FuzzyMatchMeta.getAlgorithmTypeByDesc( wAlgorithm.getText() )
        == FuzzyMatchMeta.OPERATION_TYPE_DAMERAU_LEVENSHTEIN );
  wlcaseSensitive.setEnabled( enableCaseSensitive );
  wcaseSensitive.setEnabled( enableCaseSensitive );
  activegetCloserValue();
}
 
Example 6
Source File: ValueNumber.java    From pentaho-kettle with Apache License 2.0
@Override
public void setString( String string ) {
  this.number = Const.toDouble( string, 0.0 );
}
 
Example 7
Source File: ValueString.java    From pentaho-kettle with Apache License 2.0
@Override
public double getNumber() {
  return Const.toDouble( string, 0.0 );
}
 
Example 8
Source File: Database.java    From pentaho-kettle with Apache License 2.0
public void cleanupLogRecords( LogTableCoreInterface logTable ) throws KettleDatabaseException {
  double timeout = Const.toDouble( Const.trim( environmentSubstitute( logTable.getTimeoutInDays() ) ), 0.0 );
  if ( timeout < 0.000001 ) {
    // The timeout has to be at least a few seconds, otherwise we don't
    // bother
    return;
  }

  String schemaTable =
    databaseMeta.getQuotedSchemaTableCombination( environmentSubstitute( logTable.getActualSchemaName() ),
      environmentSubstitute( logTable.getActualTableName() ) );

  if ( schemaTable.isEmpty() ) {
    // we can't proceed without a table name
    DatabaseLogExceptionFactory.getExceptionStrategy( logTable )
      .registerException( log, PKG, "DatabaseMeta.Error.LogTableNameNotFound" );
  }

  LogTableField logField = logTable.getLogDateField();
  if ( logField == null ) {
    // we can't proceed without the log date field
    DatabaseLogExceptionFactory.getExceptionStrategy( logTable )
      .registerException( log, PKG, "Database.Exception.LogTimeoutDefinedOnTableWithoutLogField" );
  }

  String sql =
    "DELETE FROM " + schemaTable + " WHERE " + databaseMeta.quoteField( logField.getFieldName() ) + " < ?";
  long now = System.currentTimeMillis();
  long limit = now - Math.round( timeout * 24 * 60 * 60 * 1000 );
  RowMetaAndData row = new RowMetaAndData();
  row.addValue( logField.getFieldName(), ValueMetaInterface.TYPE_DATE, new Date( limit ) );

  try {
    // fire the delete statement against the database
    execStatement( sql, row.getRowMeta(), row.getData() );
  } catch ( Exception e ) {
    DatabaseLogExceptionFactory.getExceptionStrategy( logTable )
      .registerException( log, PKG, "Database.Exception.UnableToCleanUpOlderRecordsFromLogTable",
        environmentSubstitute( logTable.getActualTableName() ) );
  }
}