Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#getBinaryString()

The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#getBinaryString(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: TextFileOutput.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a single field value into the byte sequence that is written to the text file.
 *
 * @param v the value metadata describing type, trim type, length and encoding
 * @param valueData the raw value (may be a String, a binary-stored byte[], or another storage form)
 * @return the bytes to emit for this field
 * @throws KettleValueException if the value cannot be converted to a string
 */
private byte[] formatField( ValueMetaInterface v, Object valueData ) throws KettleValueException {
  if ( !v.isString() ) {
    // Non-string types: delegate to the value meta's own binary representation.
    return v.getBinaryString( valueData );
  }
  // Fast path: the value is already stored as a binary string and needs no trimming,
  // padding or re-encoding, so it can be passed through unchanged.
  boolean passThrough =
      v.isStorageBinaryString()
          && v.getTrimType() == ValueMetaInterface.TRIM_TYPE_NONE
          && v.getLength() < 0
          && Utils.isEmpty( v.getStringEncoding() );
  if ( passThrough ) {
    return (byte[]) valueData;
  }
  // Otherwise normalize to a String, apply the configured trim, and encode.
  String text = ( valueData instanceof String ) ? (String) valueData : v.getString( valueData );
  return convertStringToBinaryString( v, Const.trimToType( text, v.getTrimType() ) );
}
 
Example 2
Source File: IngresVectorwiseLoader.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Serializes one row to the fifo file consumed by the Ingres Vectorwise bulk loader.
 *
 * <p>The row is emitted as delimiter-separated values; when the SSV option is enabled,
 * each textual value is wrapped in double quotes and the separator is a semicolon.</p>
 *
 * @param rowMeta metadata for the row being written
 * @param r the row data
 * @throws KettleException if writing fails; the loader's own error is preferred
 *         over a generic broken-pipe failure when available
 */
private void writeRowToBulk( RowMetaInterface rowMeta, Object[] r ) throws KettleException {

    try {
      // The fifo expects CSV-style rows: value|value|value... or, with the SSV
      // option enabled, "value";"value";... — pick the separator accordingly.
      final byte[] fieldSeparator = meta.isUseSSV() ? data.semicolon : data.separator;

      for ( int fieldNr = 0; fieldNr < data.keynrs.length; fieldNr++ ) {
        if ( fieldNr > 0 ) {
          // Separator goes between fields, never before the first one.
          write( fieldSeparator );
        }

        int rowIndex = data.keynrs[fieldNr];
        ValueMetaInterface fieldMeta = rowMeta.getValueMeta( rowIndex );
        Object fieldData = r[rowIndex];

        if ( fieldData == null ) {
          continue; // a null value contributes no bytes
        }

        if ( fieldMeta.isStorageBinaryString() ) {
          // Binary-stored strings can be streamed out directly.
          write( fieldMeta.getBinaryString( fieldData ) );
          continue;
        }

        // Dates and numerics are already formatted by the bulk row metadata.
        String text = fieldMeta.getString( fieldData );
        if ( text == null ) {
          continue;
        }

        boolean escapeText = meta.isEscapingSpecialCharacters() && fieldMeta.isString();
        if ( escapeText ) {
          text = replace( text, new String[] { "\n", "\r", }, new String[] { "\\n", "\\r", } );
        }

        if ( meta.isUseSSV() ) {
          if ( escapeText ) {
            // Embedded double quotes must be escaped before the field is quoted.
            text = replace( text, new String[] { "\"" }, new String[] { "\\\"" } );
            log.logRowlevel( "\' \" \' symbol was added for the future processing" );
          }
          write( data.doubleQuote );
          write( data.getBytes( text ) );
          write( data.doubleQuote );
        } else {
          write( data.getBytes( text ) );
        }
      }

      // Terminate the row.
      write( data.newline );
    } catch ( Exception e ) {
      // If the import itself failed, surface the loader's error instead of
      // the less informative "Pipe Broken" from the fifo write.
      try {
        data.sqlRunner.checkExcn();
      } catch ( Exception loadEx ) {
        throw new KettleException( "Error serializing rows of data to the fifo file", loadEx );
      }

      throw new KettleException( "Error serializing rows of data to the fifo file", e );
    }

  }