Java Code Examples for org.pentaho.di.core.row.value.ValueMetaBase#convertStringToBoolean()

The following examples show how to use org.pentaho.di.core.row.value.ValueMetaBase#convertStringToBoolean(). You can go to the original project or source file by following the link above each example.
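A quick note on the pattern: most of the examples below read a Kettle system property or variable with a default of "N" and pass the resulting string to convertStringToBoolean() to obtain a boolean flag. The following minimal sketch shows that pattern in isolation; the property name MY_FEATURE_FLAG is hypothetical, and treating "Y", "YES", "TRUE" and "1" as true reflects the usual Kettle convention.

import org.pentaho.di.core.Const;
import org.pentaho.di.core.row.value.ValueMetaBase;

public class ConvertStringToBooleanSketch {
  public static void main( String[] args ) {
    // Hypothetical property name, used only for illustration.
    String raw = Const.NVL( System.getProperty( "MY_FEATURE_FLAG" ), "N" );

    // convertStringToBoolean() returns a Boolean; the "N" default above guarantees it is not null here.
    boolean enabled = ValueMetaBase.convertStringToBoolean( raw );

    System.out.println( "Feature flag enabled: " + enabled );
  }
}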
Example 1
Source File: NullIfMeta.java    From pentaho-kettle with Apache License 2.0
/**
 * @param fieldValue
 *          The fieldValue to set.
 */
public void setFieldValue( String fieldValue ) {
  Boolean isEmptyAndNullDiffer = ValueMetaBase.convertStringToBoolean(
    Const.NVL( System.getProperty( Const.KETTLE_EMPTY_STRING_DIFFERS_FROM_NULL, "N" ), "N" ) );

  this.fieldValue = fieldValue == null && isEmptyAndNullDiffer ? Const.EMPTY_STRING : fieldValue;
}
 
Example 2
Source File: TextFileInputUtils.java    From pentaho-kettle with Apache License 2.0
/**
 * Returns the assembled line together with the number of physical lines that were read
 * from the file to obtain one full line.
 */
public static final TextFileLine getLine( LogChannelInterface log, InputStreamReader reader, EncodingType encodingType,
                                    int fileFormatType, StringBuilder line, String regex, long lineNumberInFile )
  throws KettleFileException {

  String sline = getLine( log, reader, encodingType, fileFormatType, line );

  boolean lenientEnclosureHandling = ValueMetaBase.convertStringToBoolean(
    Const.NVL( EnvUtil.getSystemProperty( Const.KETTLE_COMPATIBILITY_TEXT_FILE_INPUT_USE_LENIENT_ENCLOSURE_HANDLING ), "N" ) );

  if ( !lenientEnclosureHandling ) {

    while ( sline != null ) {
      /*
       * Check that the number of enclosures in the line is even.
       * If it is odd, there was an enclosed line break and we need to read
       * the next line(s) to get the remaining data for this row.
       */
      if ( checkPattern( sline, regex ) % 2 == 0 ) {
        return new TextFileLine( sline, lineNumberInFile, null );
      }

      String nextLine = getLine( log, reader, encodingType, fileFormatType, line );

      if ( nextLine == null ) {
        break;
      }

      sline = sline + nextLine;
      lineNumberInFile++;
    }

  }
  return new TextFileLine( sline, lineNumberInFile, null );
}
 
Example 3
Source File: DatabaseMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ) {
  if ( !Utils.isEmpty( variableName ) ) {
    String value = environmentSubstitute( variableName );
    if ( !Utils.isEmpty( value ) ) {
      return ValueMetaBase.convertStringToBoolean( value );
    }
  }
  return defaultValue;
}
 
Example 4
Source File: Database.java    From pentaho-kettle with Apache License 2.0
@Override
public boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ) {
  if ( !Utils.isEmpty( variableName ) ) {
    String value = environmentSubstitute( variableName );
    if ( !Utils.isEmpty( value ) ) {
      return ValueMetaBase.convertStringToBoolean( value );
    }
  }
  return defaultValue;
}
 
Example 5
Source File: Variables.java    From pentaho-kettle with Apache License 2.0
@Override
public boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ) {
  if ( !Utils.isEmpty( variableName ) ) {
    String value = environmentSubstitute( variableName );
    if ( !Utils.isEmpty( value ) ) {
      return ValueMetaBase.convertStringToBoolean( value );
    }
  }
  return defaultValue;
}
 
Example 6
Source File: S3CsvInputDialog.java    From pentaho-kettle with Apache License 2.0
private void setAwsCredentials( S3CsvInputMeta meta ) {
  /* For legacy transformations containing AWS S3 access credentials, {@link Const#KETTLE_USE_AWS_DEFAULT_CREDENTIALS} can force Spoon to use
   * the Amazon Default Credentials Provider Chain instead of using the credentials embedded in the transformation metadata. */
  if ( !ValueMetaBase.convertStringToBoolean(
    Const.NVL( EnvUtil.getSystemProperty( Const.KETTLE_USE_AWS_DEFAULT_CREDENTIALS ), "N" ) ) ) {
    meta.setAwsAccessKey( transMeta.environmentSubstitute( Const.NVL( inputMeta.getAwsAccessKey(), "" ) ) );
    meta.setAwsSecretKey( transMeta.environmentSubstitute( Const.NVL( inputMeta.getAwsSecretKey(), "" ) ) );
  }
}
 
Example 7
Source File: S3CsvInputMeta.java    From pentaho-kettle with Apache License 2.0
/**
 * For legacy transformations containing AWS S3 access credentials, {@link Const#KETTLE_USE_AWS_DEFAULT_CREDENTIALS} can force Spoon to use
 * the Amazon Default Credentials Provider Chain instead of using the credentials embedded in the transformation metadata.
 *
 * @return true if {@link Const#KETTLE_USE_AWS_DEFAULT_CREDENTIALS} is true or AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are not specified
 */
public boolean getUseAwsDefaultCredentials() {
  if ( ValueMetaBase.convertStringToBoolean( Const.NVL( EnvUtil.getSystemProperty( Const.KETTLE_USE_AWS_DEFAULT_CREDENTIALS ), "N" ) ) ) {
    return true;
  } else if ( StringUtil.isEmpty( awsAccessKey ) && StringUtil.isEmpty( awsSecretKey ) ) {
    return true;
  }
  return false;
}
 
Example 8
Source File: SelectValuesMeta.java    From pentaho-kettle with Apache License 2.0
public void getMetadataFields( RowMetaInterface inputRowMeta, String name, VariableSpace space ) throws KettlePluginException {
  if ( meta != null && meta.length > 0 ) {
    // METADATA mode: change the meta-data of the values mentioned...

    for ( int i = 0; i < meta.length; i++ ) {
      SelectMetadataChange metaChange = meta[i];

      int idx = inputRowMeta.indexOfValue( metaChange.getName() );
      boolean metaTypeChangeUsesNewTypeDefaults = false; // Normal behavior as of 5.x or so
      if ( space != null ) {
        metaTypeChangeUsesNewTypeDefaults = ValueMetaBase.convertStringToBoolean(
            space.getVariable( Const.KETTLE_COMPATIBILITY_SELECT_VALUES_TYPE_CHANGE_USES_TYPE_DEFAULTS, "N" ) );
      }
      if ( idx >= 0 ) { // We found the value

        // This is the value we need to change:
        ValueMetaInterface v = inputRowMeta.getValueMeta( idx );

        // Do we need to rename ?
        if ( !v.getName().equals( metaChange.getRename() ) && !Utils.isEmpty( metaChange.getRename() ) ) {
          v.setName( metaChange.getRename() );
          v.setOrigin( name );
          // need to reinsert to check name conflicts
          inputRowMeta.setValueMeta( idx, v );
        }
        // Change the type?
        if ( metaChange.getType() != ValueMetaInterface.TYPE_NONE && v.getType() != metaChange.getType() ) {
          // Fix for PDI-16388 - clone copies over the conversion mask instead of using the default for the new type
          if ( !metaTypeChangeUsesNewTypeDefaults ) {
            v = ValueMetaFactory.cloneValueMeta( v, metaChange.getType() );
          } else {
            v = ValueMetaFactory.createValueMeta( v.getName(), metaChange.getType() );
          }

          // This is now a copy, replace it in the row!
          //
          inputRowMeta.setValueMeta( idx, v );

          // This also moves the data to normal storage type
          //
          v.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
        }
        if ( metaChange.getLength() != UNDEFINED ) {
          v.setLength( metaChange.getLength() );
          v.setOrigin( name );
        }
        if ( metaChange.getPrecision() != UNDEFINED ) {
          v.setPrecision( metaChange.getPrecision() );
          v.setOrigin( name );
        }
        if ( metaChange.getStorageType() >= 0 ) {
          v.setStorageType( metaChange.getStorageType() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getConversionMask() ) ) {
          v.setConversionMask( metaChange.getConversionMask() );
          v.setOrigin( name );
        }

        v.setDateFormatLenient( metaChange.isDateFormatLenient() );
        v.setDateFormatLocale( EnvUtil.createLocale( metaChange.getDateFormatLocale() ) );
        v.setDateFormatTimeZone( EnvUtil.createTimeZone( metaChange.getDateFormatTimeZone() ) );
        v.setLenientStringToNumber( metaChange.isLenientStringToNumber() );

        if ( !Utils.isEmpty( metaChange.getEncoding() ) ) {
          v.setStringEncoding( metaChange.getEncoding() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getDecimalSymbol() ) ) {
          v.setDecimalSymbol( metaChange.getDecimalSymbol() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getGroupingSymbol() ) ) {
          v.setGroupingSymbol( metaChange.getGroupingSymbol() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getCurrencySymbol() ) ) {
          v.setCurrencySymbol( metaChange.getCurrencySymbol() );
          v.setOrigin( name );
        }
      }
    }
  }
}
 
Example 9
Source File: DatabaseLookup.java    From pentaho-kettle with Apache License 2.0
private boolean shouldDatabaseReturnValueTypeBeUsed() {
  String skipLookupReturnFields = getVariable( Const.KETTLE_COMPATIBILITY_DB_LOOKUP_USE_FIELDS_RETURN_TYPE_CHOSEN_IN_UI, "N" );
  return !ValueMetaBase.convertStringToBoolean( Const.NVL( skipLookupReturnFields, "N" ) );
}
 
Example 10
Source File: Denormaliser.java    From pentaho-kettle with Apache License 2.0
private boolean processFirstRow() throws KettleStepException {
  String val = getVariable( Const.KETTLE_AGGREGATION_ALL_NULLS_ARE_ZERO, "N" );
  this.allNullsAreZero = ValueMetaBase.convertStringToBoolean( val );
  val = getVariable( Const.KETTLE_AGGREGATION_MIN_NULL_IS_VALUED, "N" );
  this.minNullIsValued = ValueMetaBase.convertStringToBoolean( val );
  data.inputRowMeta = getInputRowMeta();
  data.outputRowMeta = data.inputRowMeta.clone();
  meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

  data.keyFieldNr = data.inputRowMeta.indexOfValue( meta.getKeyField() );
  if ( data.keyFieldNr < 0 ) {
    logError( BaseMessages.getString( PKG, "Denormaliser.Log.KeyFieldNotFound", meta.getKeyField() ) );
    setErrors( 1 );
    stopAll();
    return false;
  }

  Map<Integer, Integer> subjects = new Hashtable<Integer, Integer>();
  data.fieldNameIndex = new int[meta.getDenormaliserTargetField().length];
  for ( int i = 0; i < meta.getDenormaliserTargetField().length; i++ ) {
    DenormaliserTargetField field = meta.getDenormaliserTargetField()[i];
    int idx = data.inputRowMeta.indexOfValue( field.getFieldName() );
    if ( idx < 0 ) {
      logError( BaseMessages.getString( PKG, "Denormaliser.Log.UnpivotFieldNotFound", field.getFieldName() ) );
      setErrors( 1 );
      stopAll();
      return false;
    }
    data.fieldNameIndex[i] = idx;
    subjects.put( Integer.valueOf( idx ), Integer.valueOf( idx ) );

    // Check that the value field name is not accidentally the same as the key field name.
    // This is not supported, of course, and given the complexity of the step it is easy to miss:
    if ( data.fieldNameIndex[i] == data.keyFieldNr ) {
      logError( BaseMessages.getString( PKG, "Denormaliser.Log.ValueFieldSameAsKeyField", field.getFieldName() ) );
      setErrors( 1 );
      stopAll();
      return false;
    }

    // Fill a hashtable with the key strings and the position(s) of the field(s) in the row to take.
    // Store the indexes in a List so that we can accommodate multiple key/value pairs...
    //
    String keyValue = environmentSubstitute( field.getKeyValue() );
    List<Integer> indexes = data.keyValue.get( keyValue );
    if ( indexes == null ) {
      indexes = new ArrayList<Integer>( 2 );
    }
    indexes.add( Integer.valueOf( i ) ); // Add the index to the list...
    data.keyValue.put( keyValue, indexes ); // store the list
  }

  Set<Integer> subjectSet = subjects.keySet();
  data.fieldNrs = subjectSet.toArray( new Integer[subjectSet.size()] );

  data.groupnrs = new int[meta.getGroupField().length];
  for ( int i = 0; i < meta.getGroupField().length; i++ ) {
    data.groupnrs[i] = data.inputRowMeta.indexOfValue( meta.getGroupField()[i] );
    if ( data.groupnrs[i] < 0 ) {
      logError( BaseMessages.getString( PKG, "Denormaliser.Log.GroupingFieldNotFound", meta.getGroupField()[i] ) );
      setErrors( 1 );
      stopAll();
      return false;
    }
  }

  List<Integer> removeList = new ArrayList<Integer>();
  removeList.add( Integer.valueOf( data.keyFieldNr ) );
  for ( int i = 0; i < data.fieldNrs.length; i++ ) {
    removeList.add( data.fieldNrs[i] );
  }
  Collections.sort( removeList );

  data.removeNrs = new int[removeList.size()];
  for ( int i = 0; i < removeList.size(); i++ ) {
    data.removeNrs[i] = removeList.get( i );
  }
  return true;
}
 
Example 11
Source File: XMLOutput.java    From pentaho-kettle with Apache License 2.0
private boolean isNullValueAllowed( int valueMetaType ) {

  // Check whether the backward-compatibility flag is set, to guarantee compatibility with older versions.
  // In 6.1 null values were written as the string "null"; since then the attribute is not written at all.

  String val = getVariable( Const.KETTLE_COMPATIBILITY_XML_OUTPUT_NULL_VALUES, "N" );

  return ValueMetaBase.convertStringToBoolean( Const.NVL( val, "N" ) ) && valueMetaType == ValueMetaInterface.TYPE_STRING;
}