Java Code Examples for org.pentaho.di.core.RowMetaAndData#size()

The following examples show how to use org.pentaho.di.core.RowMetaAndData#size() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: FilesFromResultMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {

  // Describe this step's output by borrowing the field layout of a ResultFile row.
  try {
    ResultFile resultFile = new ResultFile(
      ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( "foo.bar", space ), "parentOrigin", "origin" );
    RowMetaAndData fileRow = resultFile.getRow();

    // Mark each field as originating from this step.
    for ( int fieldIndex = 0; fieldIndex < fileRow.size(); fieldIndex++ ) {
      fileRow.getValueMeta( fieldIndex ).setOrigin( name );
    }
    r.addRowMeta( fileRow.getRowMeta() );
  } catch ( KettleFileException e ) {
    throw new KettleStepException( e );
  }
}
 
Example 2
Source File: JavaScriptStringIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Compares two row lists element by element, in order, and fails the test at the
 * first mismatch in row count, field count, or field values.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  // Walk both lists in lock-step; stops at the shorter one.
  int common = Math.min( rows1.size(), rows2.size() );
  for ( int rowIdx = 0; rowIdx < common; rowIdx++ ) {
    int idx = rowIdx + 1; // 1-based row number used in failure messages
    RowMetaAndData expectedRow = rows1.get( rowIdx );
    RowMetaAndData actualRow = rows2.get( rowIdx );

    Object[] expectedData = expectedRow.getData();
    Object[] actualData = actualRow.getData();

    if ( expectedRow.size() != actualRow.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Compare on every field, in order.
    int[] allFields = new int[ expectedRow.size() ];
    for ( int f = 0; f < allFields.length; f++ ) {
      allFields[ f ] = f;
    }
    try {
      if ( expectedRow.getRowMeta().compare( expectedData, actualData, allFields ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }
  }
}
 
Example 3
Source File: TableInputIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that the two lists contain the same rows in the same order; any
 * difference in size or content fails the test.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  Iterator<RowMetaAndData> lhsIt = rows1.iterator();
  Iterator<RowMetaAndData> rhsIt = rows2.iterator();
  int idx = 1; // 1-based row counter for failure messages

  while ( lhsIt.hasNext() && rhsIt.hasNext() ) {
    RowMetaAndData lhs = lhsIt.next();
    RowMetaAndData rhs = rhsIt.next();

    Object[] lhsData = lhs.getData();
    Object[] rhsData = rhs.getData();

    if ( lhs.size() != rhs.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Every field takes part in the comparison.
    int[] fieldIndexes = new int[ lhsData.length ];
    for ( int f = 0; f < fieldIndexes.length; f++ ) {
      fieldIndexes[ f ] = f;
    }
    try {
      if ( lhs.getRowMeta().compare( lhsData, rhsData, fieldIndexes ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }

    idx++;
  }
}
 
Example 4
Source File: NullIfIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Asserts that both row lists are identical, comparing rows pairwise in order.
 * Fails the test on the first size or value difference.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  // Pairwise walk over the shorter of the two lists.
  int limit = Math.min( rows1.size(), rows2.size() );
  for ( int i = 0; i < limit; i++ ) {
    int idx = i + 1; // row numbers in messages are 1-based
    RowMetaAndData a = rows1.get( i );
    RowMetaAndData b = rows2.get( i );

    Object[] aData = a.getData();
    Object[] bData = b.getData();

    if ( a.size() != b.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Build the index list [0..n) so all fields are compared.
    int[] fields = new int[ aData.length ];
    for ( int f = 0; f < fields.length; f++ ) {
      fields[ f ] = f;
    }
    try {
      if ( a.getRowMeta().compare( aData, bData, fields ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }
  }
}
 
Example 5
Source File: JobEntryDeleteFiles.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Collects the path-to-filemask pairs that drive this job entry's execution.
 * The data comes from one of two sources:
 * <ol>
 * <li>the result rows produced by the previous job entry / transformation, or</li>
 * <li>this entry's own argument and filemask arrays.</li>
 * </ol>
 * A Guava multimap is used because keys may repeat: the same folder can appear
 * with several different wildcards.
 *
 * @param rowsFromPreviousMeta result rows of the previous job entry / transformation, may be null
 * @return a multimap of path to file mask; possibly partially filled if an invalid row is met
 * @throws KettleValueException if a row value cannot be read as a string
 */
private Multimap<String, String> populateDataForJobExecution( List<RowMetaAndData> rowsFromPreviousMeta ) throws KettleValueException {
  Multimap<String, String> pathToMaskMap = ArrayListMultimap.create();
  if ( argFromPrevious && rowsFromPreviousMeta != null ) {
    // Source 1: rows handed over by the previous job entry / transformation.
    for ( RowMetaAndData row : rowsFromPreviousMeta ) {
      if ( row.size() < 2 ) {
        // A valid row needs at least a path and a mask; bail out with what we have so far.
        logError( BaseMessages.getString(
          PKG, "JobDeleteFiles.Error.InvalidNumberOfRowsFromPrevMeta", row.size() ) );
        return pathToMaskMap;
      }
      String pathToFile = row.getString( 0, null );
      String fileMask = row.getString( 1, null );

      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobEntryDeleteFiles.ProcessingRow", pathToFile, fileMask ) );
      }

      pathToMaskMap.put( pathToFile, fileMask );
    }
  } else if ( arguments != null ) {
    // Source 2: this entry's own argument / filemask arrays.
    for ( int argIdx = 0; argIdx < arguments.length; argIdx++ ) {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString(
          PKG, "JobEntryDeleteFiles.ProcessingArg", arguments[ argIdx ], filemasks[ argIdx ] ) );
      }
      pathToMaskMap.put( arguments[ argIdx ], filemasks[ argIdx ] );
    }
  }

  return pathToMaskMap;
}
 
Example 6
Source File: CsvInputBase.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Check the 2 lists comparing the rows in order. If they are not the same fail the test.
 *
 * @param rows1
 *          set 1 of rows to compare
 * @param rows2
 *          set 2 of rows to compare
 * @param fileNameColumn
 *          Number of the column containing the filename. This is only checked for being non-null (some systems maybe
 *          canonize names differently than we input). Pass a negative value to compare every column verbatim.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2, int fileNameColumn ) {
  int idx = 1;
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  Iterator<RowMetaAndData> it1 = rows1.iterator();
  Iterator<RowMetaAndData> it2 = rows2.iterator();

  while ( it1.hasNext() && it2.hasNext() ) {
    RowMetaAndData rm1 = it1.next();
    RowMetaAndData rm2 = it2.next();

    Object[] r1 = rm1.getData();
    Object[] r2 = rm2.getData();

    if ( rm1.size() != rm2.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Compare on all fields, in order.
    int[] fields = new int[r1.length];
    for ( int ydx = 0; ydx < r1.length; ydx++ ) {
      fields[ydx] = ydx;
    }
    try {
      // Only mask out the filename column when a valid index was supplied; an unguarded
      // negative index raised ArrayIndexOutOfBoundsException instead of a clean test failure.
      // This mirrors the guard used by the TestUtilities.checkRows variants.
      if ( fileNameColumn >= 0 ) {
        r1[fileNameColumn] = r2[fileNameColumn];
      }
      if ( rm1.getRowMeta().compare( r1, r2, fields ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }

    idx++;
  }
}
 
Example 7
Source File: AddSequenceIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Checks that the two lists hold equal rows in the same order; the first
 * discrepancy (row count, field count, or any field value) fails the test.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  Iterator<RowMetaAndData> firstIt = rows1.iterator();
  Iterator<RowMetaAndData> secondIt = rows2.iterator();
  int idx = 1; // 1-based row number used in messages

  while ( firstIt.hasNext() && secondIt.hasNext() ) {
    RowMetaAndData first = firstIt.next();
    RowMetaAndData second = secondIt.next();

    Object[] firstData = first.getData();
    Object[] secondData = second.getData();

    if ( first.size() != second.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // All fields participate in the comparison.
    int[] compareOn = new int[ first.size() ];
    for ( int f = 0; f < compareOn.length; f++ ) {
      compareOn[ f ] = f;
    }
    try {
      if ( first.getRowMeta().compare( firstData, secondData, compareOn ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }

    idx++;
  }
}
 
Example 8
Source File: ValueMapperIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Compares two lists of rows pairwise, in order; fails the test as soon as a
 * size or value difference is found.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  // Visit corresponding rows by index; stop at the end of the shorter list.
  int pairCount = Math.min( rows1.size(), rows2.size() );
  for ( int i = 0; i < pairCount; i++ ) {
    int idx = i + 1; // messages use a 1-based row number
    RowMetaAndData rowA = rows1.get( i );
    RowMetaAndData rowB = rows2.get( i );

    Object[] dataA = rowA.getData();
    Object[] dataB = rowB.getData();

    if ( rowA.size() != rowB.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Comparison covers every column.
    int[] columns = new int[ dataA.length ];
    for ( int c = 0; c < columns.length; c++ ) {
      columns[ c ] = c;
    }
    try {
      if ( rowA.getRowMeta().compare( dataA, dataB, columns ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }
  }
}
 
Example 9
Source File: TableOutputIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies row-for-row equality of two result lists; any difference in length,
 * width, or content fails the test.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  Iterator<RowMetaAndData> leftIt = rows1.iterator();
  Iterator<RowMetaAndData> rightIt = rows2.iterator();
  int idx = 1; // 1-based counter for failure messages

  while ( leftIt.hasNext() && rightIt.hasNext() ) {
    RowMetaAndData left = leftIt.next();
    RowMetaAndData right = rightIt.next();

    Object[] leftData = left.getData();
    Object[] rightData = right.getData();

    if ( left.size() != right.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Compare across the full width of the row.
    int[] fieldNrs = new int[ leftData.length ];
    for ( int f = 0; f < fieldNrs.length; f++ ) {
      fieldNrs[ f ] = f;
    }
    try {
      if ( left.getRowMeta().compare( leftData, rightData, fieldNrs ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }

    idx++;
  }
}
 
Example 10
Source File: DatabaseLookupIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Asserts both lists contain identical rows in identical order, failing the
 * test on the first mismatch encountered.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  // Walk the two lists together by position.
  int rowsToCheck = Math.min( rows1.size(), rows2.size() );
  for ( int pos = 0; pos < rowsToCheck; pos++ ) {
    int idx = pos + 1; // 1-based row number in messages
    RowMetaAndData expected = rows1.get( pos );
    RowMetaAndData observed = rows2.get( pos );

    Object[] expectedData = expected.getData();
    Object[] observedData = observed.getData();

    if ( expected.size() != observed.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // All columns are included in the comparison.
    int[] cols = new int[ expectedData.length ];
    for ( int c = 0; c < cols.length; c++ ) {
      cols[ c ] = c;
    }
    try {
      if ( expected.getRowMeta().compare( expectedData, observedData, cols ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }
  }
}
 
Example 11
Source File: JavaScriptSpecialIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Compares two row lists in order; the test fails at the first row whose size
 * or values differ, or when the lists have different lengths.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  // Index-based walk over both lists in parallel.
  int n = Math.min( rows1.size(), rows2.size() );
  for ( int i = 0; i < n; i++ ) {
    int idx = i + 1; // failure messages number rows from 1
    RowMetaAndData one = rows1.get( i );
    RowMetaAndData two = rows2.get( i );

    Object[] dataOne = one.getData();
    Object[] dataTwo = two.getData();

    if ( one.size() != two.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Field index array [0..size) so nothing is skipped.
    int[] everyField = new int[ one.size() ];
    for ( int f = 0; f < everyField.length; f++ ) {
      everyField[ f ] = f;
    }
    try {
      if ( one.getRowMeta().compare( dataOne, dataTwo, everyField ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }
  }
}
 
Example 12
Source File: DetectLastRowStepIT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Check the 2 lists comparing the rows in order. If they are not the same fail the test.
 *
 * @param rows1
 *          first row set to compare
 * @param rows2
 *          second row set to compare
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  Iterator<RowMetaAndData> aIt = rows1.iterator();
  Iterator<RowMetaAndData> bIt = rows2.iterator();
  int idx = 1; // 1-based row number reported in failures

  while ( aIt.hasNext() && bIt.hasNext() ) {
    RowMetaAndData aRow = aIt.next();
    RowMetaAndData bRow = bIt.next();

    Object[] aData = aRow.getData();
    Object[] bData = bRow.getData();

    if ( aRow.size() != bRow.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // The comparison spans every field of the row.
    int[] fieldList = new int[ aRow.size() ];
    for ( int f = 0; f < fieldList.length; f++ ) {
      fieldList[ f ] = f;
    }
    try {
      if ( aRow.getRowMeta().compare( aData, bData, fieldList ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }

    idx++;
  }
}
 
Example 13
Source File: XsltTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Check the 2 lists comparing the rows in order. If they are not the same fail the test.
 * 
 * @param rows1
 *          set 1 of rows to compare
 * @param rows2
 *          set 2 of rows to compare
 * @param fileNameColumn
 *          Number of the column containing the filename. This is only checked for being non-null (some systems maybe
 *          canonize names differently than we input). Pass a negative value to compare every column verbatim.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2, int fileNameColumn ) {
  int idx = 1;
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  Iterator<RowMetaAndData> it1 = rows1.iterator();
  Iterator<RowMetaAndData> it2 = rows2.iterator();

  while ( it1.hasNext() && it2.hasNext() ) {
    RowMetaAndData rm1 = it1.next();
    RowMetaAndData rm2 = it2.next();

    Object[] r1 = rm1.getData();
    Object[] r2 = rm2.getData();

    if ( rm1.size() != rm2.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Compare on all fields, in order.
    int[] fields = new int[r1.length];
    for ( int ydx = 0; ydx < r1.length; ydx++ ) {
      fields[ydx] = ydx;
    }
    try {
      // Only mask out the filename column when a valid index was supplied; an unguarded
      // negative index raised ArrayIndexOutOfBoundsException instead of a clean test failure.
      // This mirrors the guard used by the TestUtilities.checkRows variants.
      if ( fileNameColumn >= 0 ) {
        r1[fileNameColumn] = r2[fileNameColumn];
      }
      if ( rm1.getRowMeta().compare( r1, r2, fields ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }

    idx++;
  }
}
 
Example 14
Source File: PentahoAvroReadWriteTest.java    From pentaho-hadoop-shims with Apache License 2.0 5 votes vote down vote up
@Test
public void testConvertToBytesOnOutput() throws Exception {
  // One row: two strings plus a raw byte array, all written out as Avro BYTES.
  Object[] rowData = new Object[] { "Row1Field1", "Row1Field2", "foobar".getBytes() };

  // Output side: source Pentaho types (string, string, binary) targeting Avro BYTES.
  String[][] outputSchemaDescription = new String[][] {
    { "avroField1", "pentahoField1", String.valueOf( AvroSpec.DataType.BYTES.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ), "0", "0" },
    { "avroField2", "pentahoField2", String.valueOf( AvroSpec.DataType.BYTES.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ), "0", "0" },
    { "avroBytes10", "pentahoBinary10", String.valueOf( AvroSpec.DataType.BYTES.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BINARY ), "0", "0" }
  };

  // Input side: everything is read back as Pentaho binary.
  String[][] inputSchemaDescription = new String[][] {
    { "avroField1", "pentahoField1", String.valueOf( AvroSpec.DataType.BYTES.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BINARY ) },
    { "avroField2", "pentahoField2", String.valueOf( AvroSpec.DataType.BYTES.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BINARY ) },
    { "avroBytes10", "pentahoBinary10", String.valueOf( AvroSpec.DataType.BYTES.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BINARY ) }
  };

  RowMeta outputRowMeta = buildRowMeta( outputSchemaDescription );
  RowMetaAndData row = new RowMetaAndData( outputRowMeta, rowData );

  // Expected values: every field converted to its binary representation.
  byte[][] expectedResults = new byte[ rowData.length ][];
  for ( int fieldIdx = 0; fieldIdx < row.size(); fieldIdx++ ) {
    expectedResults[ fieldIdx ] = row.getBinary( fieldIdx, null );
  }

  doReadWrite( inputSchemaDescription, outputSchemaDescription, rowData,
    IPentahoAvroOutputFormat.COMPRESSION.UNCOMPRESSED, "avroOutputNone.avro", null, expectedResults, true );
}
 
Example 15
Source File: GetXMLDataTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Check the 2 lists comparing the rows in order. If they are not the same fail the test.
 * 
 * @param rows1
 *          set 1 of rows to compare
 * @param rows2
 *          set 2 of rows to compare
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  // Compare row pairs by position; the loop covers the shorter list.
  int total = Math.min( rows1.size(), rows2.size() );
  for ( int i = 0; i < total; i++ ) {
    int idx = i + 1; // 1-based numbering in failure messages
    RowMetaAndData firstRow = rows1.get( i );
    RowMetaAndData secondRow = rows2.get( i );

    Object[] firstData = firstRow.getData();
    Object[] secondData = secondRow.getData();

    if ( firstRow.size() != secondRow.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Include every column in the comparison.
    int[] colIndexes = new int[ firstData.length ];
    for ( int c = 0; c < colIndexes.length; c++ ) {
      colIndexes[ c ] = c;
    }
    try {
      if ( firstRow.getRowMeta().compare( firstData, secondData, colIndexes ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }
  }
}
 
Example 16
Source File: PentahoAvroReadWriteTest.java    From pentaho-hadoop-shims with Apache License 2.0 4 votes vote down vote up
@Test
public void testConvertToStringOnOutput() throws Exception {
  // One row covering every Pentaho type that can be converted to an Avro STRING on output.
  // Boxing uses the valueOf factories: the Double/Long constructors are deprecated since Java 9.
  Object[] rowData =
    new Object[] { "Row1Field1", "Row1Field2", Double.valueOf( 3.1 ), new BigDecimal( "4.1" ), DEFAULT_INET_ADDR,
      Boolean.TRUE, Long.valueOf( 1L ), date1, timeStamp1, "foobar".getBytes() };
  // Output side: each source Pentaho type is written out as an Avro STRING.
  String[][] outputSchemaDescription = new String[][] {
    { "avroField1", "pentahoField1", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ), "0", "0" },
    { "avroField2", "pentahoField2", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ), "0", "0" },
    { "avroDouble3", "pentahoNumber3", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ), "0", "0" },
    { "avroDecimal4", "pentahoBigNumber4", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BIGNUMBER ), "2", "1" },
    { "avroString5", "pentahoInet5", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INET ), "0", "0" },
    { "avroBoolean6", "pentahoBoolean6", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BOOLEAN ), "0", "0" },
    { "avroInt7", "pentahoInt7", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ), "0", "0" },
    { "avroDate8", "pentahoDate8", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_DATE ), "0", "0" },
    { "avroTimestamp9", "pentahoTimestamp9", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_TIMESTAMP ), "0", "0" },
    { "avroBytes10", "pentahoBinary10", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BINARY ), "0", "0" }
  };

  // Input side: everything is read back as a Pentaho string.
  String[][] inputSchemaDescription = new String[][] {
    { "avroField1", "pentahoField1", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) },
    { "avroField2", "pentahoField2", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) },
    { "avroDouble3", "pentahoNumber3", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) },
    { "avroDecimal4", "pentahoBigNumber4", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) },
    { "avroString5", "pentahoInet5", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) },
    { "avroBoolean6", "pentahoBoolean6", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) },
    { "avroInt7", "pentahoInt7", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) },
    { "avroDate8", "pentahoDate8", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) },
    { "avroTimestamp9", "pentahoTimestamp9", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) },
    { "avroBytes10", "pentahoBinary10", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ) }
  };

  RowMeta rowMeta = buildRowMeta( outputSchemaDescription );
  RowMetaAndData rowMetaAndData = new RowMetaAndData( rowMeta, rowData );

  // Expected values: the string conversion of every field.
  String[] expectedResults = new String[ rowData.length ];
  for ( int i = 0; i < rowMetaAndData.size(); i++ ) {
    expectedResults[ i ] = rowMetaAndData.getString( i, null );
  }

  doReadWrite( inputSchemaDescription, outputSchemaDescription, rowData,
    IPentahoAvroOutputFormat.COMPRESSION.UNCOMPRESSED, "avroOutputNone.avro", null, expectedResults, true );
}
 
Example 17
Source File: TestUtilities.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Check the 2 lists comparing the rows in order. If they are not the same fail the test.
 * 
 * @param rows1
 *          set 1 of rows to compare
 * @param rows2
 *          set 2 of rows to compare
 * @param fileNameColumn
 *          Number of the column containing the filename. This is only checked for being non-null (some systems maybe
 *          canonize names differently than we input).
 */
public static void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2, int fileNameColumn )
  throws TestFailedException {

  if ( rows1.size() != rows2.size() ) {
    throw new TestFailedException( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  // Compare matching rows by position.
  int rowCount = Math.min( rows1.size(), rows2.size() );
  for ( int pos = 0; pos < rowCount; pos++ ) {
    int idx = pos + 1; // 1-based row number used in failure messages
    RowMetaAndData firstRow = rows1.get( pos );
    RowMetaAndData secondRow = rows2.get( pos );

    RowMetaInterface firstMeta = firstRow.getRowMeta();

    Object[] firstData = firstRow.getData();
    Object[] secondData = secondRow.getData();

    if ( firstRow.size() != secondRow.size() ) {
      throw new TestFailedException( "row number " + idx + " is not equal" );
    }

    // Compare on every field described by the row meta.
    int[] allFields = new int[ firstMeta.size() ];
    for ( int f = 0; f < allFields.length; f++ ) {
      allFields[ f ] = f;
    }

    try {
      // Mask out the filename column (if any) before comparing, since different
      // systems may canonize file names differently.
      if ( fileNameColumn >= 0 ) {
        firstData[ fileNameColumn ] = secondData[ fileNameColumn ];
      }
      if ( firstRow.getRowMeta().compare( firstData, secondData, allFields ) != 0 ) {
        throw new ComparisonFailure( "row nr " + idx + " is not equal", firstMeta.getString( firstData ),
            firstMeta.getString( secondData ) );
      }
    } catch ( KettleValueException e ) {
      throw new TestFailedException( "row nr " + idx + " is not equal" );
    }
  }
}
 
Example 18
Source File: TestUtilities.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Check the 2 lists comparing the rows in order. If they are not the same fail the test.
 *
 * @param rows1          set 1 of rows to compare
 * @param rows2          set 2 of rows to compare
 * @param fileNameColumn Number of the column containing the filename. This is only checked for being non-null (some
 *                       systems maybe canonize names differently than we input).
 */
public static void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2, int fileNameColumn )
  throws TestFailedException {

  if ( rows1.size() != rows2.size() ) {
    throw new TestFailedException( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  Iterator<RowMetaAndData> leftIt = rows1.iterator();
  Iterator<RowMetaAndData> rightIt = rows2.iterator();
  int idx = 1; // 1-based row number reported on failure

  while ( leftIt.hasNext() && rightIt.hasNext() ) {
    RowMetaAndData leftRow = leftIt.next();
    RowMetaAndData rightRow = rightIt.next();

    RowMetaInterface leftMeta = leftRow.getRowMeta();

    Object[] leftData = leftRow.getData();
    Object[] rightData = rightRow.getData();

    if ( leftRow.size() != rightRow.size() ) {
      throw new TestFailedException( "row number " + idx + " is not equal" );
    }

    // Every field described by the meta takes part in the comparison.
    int[] fieldIndexes = new int[ leftMeta.size() ];
    for ( int f = 0; f < fieldIndexes.length; f++ ) {
      fieldIndexes[ f ] = f;
    }

    try {
      // Neutralize the filename column (when supplied) before comparing:
      // different systems may canonize file names differently.
      if ( fileNameColumn >= 0 ) {
        leftData[ fileNameColumn ] = rightData[ fileNameColumn ];
      }
      if ( leftRow.getRowMeta().compare( leftData, rightData, fieldIndexes ) != 0 ) {
        throw new ComparisonFailure( "row nr " + idx + " is not equal",
        leftMeta.getString( leftData ),
        leftMeta.getString( rightData ) ); 
      }
    } catch ( KettleValueException e ) {
      throw new TestFailedException( "row nr " + idx + " is not equal" );
    }
    idx++;
  }
}
 
Example 19
Source File: PentahoAvroReadWriteTest.java    From pentaho-hadoop-shims with Apache License 2.0 4 votes vote down vote up
@Test
public void testConvertToDoubleOnOutput() throws Exception {
  // One row covering the Pentaho types that can be converted to an Avro DOUBLE on output.
  // Boxing uses the valueOf factories: the Double/Long constructors are deprecated since Java 9.
  Object[] rowData =
    new Object[] { "1", "2", Double.valueOf( 3.1 ), new BigDecimal( "4.1" ), DEFAULT_INET_ADDR, Boolean.TRUE,
      Long.valueOf( 1L ), date1, timeStamp1 };
  // Output side: each source Pentaho type is written out as an Avro DOUBLE.
  String[][] outputSchemaDescription = new String[][] {
    { "avroField1", "pentahoField1", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ), "0", "0" },
    { "avroField2", "pentahoField2", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ), "0", "0" },
    { "avroDouble3", "pentahoNumber3", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ), "0", "0" },
    { "avroDecimal4", "pentahoBigNumber4", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BIGNUMBER ), "2", "1" },
    { "avroString5", "pentahoInet5", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INET ), "0", "0" },
    { "avroBoolean6", "pentahoBoolean6", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BOOLEAN ), "0", "0" },
    { "avroInt7", "pentahoInt7", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ), "0", "0" },
    { "avroDate8", "pentahoDate8", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_DATE ), "0", "0" },
    { "avroTimestamp9", "pentahoTimestamp9", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_TIMESTAMP ), "0", "0" }
  };

  // Input side: everything is read back as a Pentaho number.
  String[][] inputSchemaDescription = new String[][] {
    { "avroField1", "pentahoField1", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ) },
    { "avroField2", "pentahoField2", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ) },
    { "avroDouble3", "pentahoNumber3", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ) },
    { "avroDecimal4", "pentahoBigNumber4", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ) },
    { "avroString5", "pentahoInet5", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ) },
    { "avroBoolean6", "pentahoBoolean6", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ) },
    { "avroInt7", "pentahoInt7", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ) },
    { "avroDate8", "pentahoDate8", String.valueOf( AvroSpec.DataType.DOUBLE.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ) },
    // NOTE(review): this input field uses the LONG ordinal while all other input fields in this
    // test use DOUBLE — confirm this is intentional.
    { "avroTimestamp9", "pentahoTimestamp9", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ) }
  };

  RowMeta rowMeta = buildRowMeta( outputSchemaDescription );
  RowMetaAndData rowMetaAndData = new RowMetaAndData( rowMeta, rowData );

  // Expected values: the numeric conversion of every field (-999 as the null fallback).
  Double[] expectedResults = new Double[ rowData.length ];
  for ( int i = 0; i < rowMetaAndData.size(); i++ ) {
    expectedResults[ i ] = rowMetaAndData.getNumber( i, -999 );
  }

  doReadWrite( inputSchemaDescription, outputSchemaDescription, rowData,
    IPentahoAvroOutputFormat.COMPRESSION.UNCOMPRESSED, "avroOutputNone.avro", null, expectedResults, true );
}
 
Example 20
Source File: PentahoAvroReadWriteTest.java    From pentaho-hadoop-shims with Apache License 2.0 4 votes vote down vote up
@Test
public void testConvertToLongOnOutput() throws Exception {
  // Verifies that a row whose fields span many Pentaho types (String, Number,
  // BigNumber, Inet, Boolean, Integer, Date, Timestamp) can be written to Avro
  // with every field converted to Avro LONG, then read back as Pentaho INTEGER.
  //
  // One value per schema entry below. Boxing constructors (new Double/new Long)
  // are deprecated since Java 9 — use valueOf / literals instead.
  Object[] rowData =
    new Object[] { "1", "2", Double.valueOf( 3.1 ), new BigDecimal( "4.1" ), DEFAULT_INET_ADDR, Boolean.TRUE,
      Long.valueOf( 1L ), date1, timeStamp1 };

  // Output schema: { avroName, pentahoName, avroType, pentahoType, precision, scale }.
  // All fields are written as Avro LONG regardless of their source Pentaho type.
  String[][] outputSchemaDescription = new String[][] {
    { "avroField1", "pentahoField1", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ), "0", "0" },
    { "avroField2", "pentahoField2", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ), "0", "0" },
    { "avroDouble3", "pentahoNumber3", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_NUMBER ), "0", "0" },
    { "avroDecimal4", "pentahoBigNumber4", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BIGNUMBER ), "2", "1" },
    { "avroString5", "pentahoInet5", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INET ), "0", "0" },
    { "avroBoolean6", "pentahoBoolean6", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_BOOLEAN ), "0", "0" },
    { "avroInt7", "pentahoInt7", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ), "0", "0" },
    { "avroDate8", "pentahoDate8", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_DATE ), "0", "0" },
    { "avroTimestamp9", "pentahoTimestamp9", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_TIMESTAMP ), "0", "0" }
  };

  // Input schema: { avroName, pentahoName, avroType, pentahoType }.
  // Every Avro LONG field is mapped back to a Pentaho INTEGER on read.
  String[][] inputSchemaDescription = new String[][] {
    { "avroField1", "pentahoField1", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ) },
    { "avroField2", "pentahoField2", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ) },
    { "avroDouble3", "pentahoNumber3", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ) },
    { "avroDecimal4", "pentahoBigNumber4", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ) },
    { "avroString5", "pentahoInet5", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ) },
    { "avroBoolean6", "pentahoBoolean6", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ) },
    { "avroInt7", "pentahoInt7", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ) },
    { "avroDate8", "pentahoDate8", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ) },
    { "avroTimestamp9", "pentahoTimestamp9", String.valueOf( AvroSpec.DataType.LONG.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_INTEGER ) }
  };

  RowMeta rowMeta = buildRowMeta( outputSchemaDescription );
  RowMetaAndData rowMetaAndData = new RowMetaAndData( rowMeta, rowData );

  // Expected values are the Long conversions Kettle itself produces for each
  // field; -999 is the fallback for values that cannot be converted.
  Long[] expectedResults = new Long[ rowData.length ];
  for ( int i = 0; i < rowMetaAndData.size(); i++ ) {
    expectedResults[ i ] = rowMetaAndData.getInteger( i, -999 );
  }

  // Round-trip write/read through an uncompressed Avro file and compare
  // against the expected conversions.
  doReadWrite( inputSchemaDescription, outputSchemaDescription, rowData,
    IPentahoAvroOutputFormat.COMPRESSION.UNCOMPRESSED, "avroOutputNone.avro", null, expectedResults, true );
}