Java Code Examples for org.pentaho.di.core.row.RowDataUtil#addRowData()

The following examples show how to use org.pentaho.di.core.row.RowDataUtil#addRowData(). All of them are drawn from open source projects; the original source file and its license are noted above each listing.
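Before diving in, here is a minimal sketch of the call pattern, inferred from how the method is used in the examples below: RowDataUtil.addRowData( base, length, extra ) appends the values of extra after the first length logical fields of base and returns the combined array. The returned array may be a new allocation, so callers should always use the return value. The literal values below are illustrative, not taken from any of the projects:

import org.pentaho.di.core.row.RowDataUtil;

Object[] base = new Object[] { "id-1", 42L };            // logical size 2
Object[] extra = new Object[] { "constant", Boolean.TRUE };

// Append 'extra' after the first 2 logical fields of 'base'. Kettle rows are
// plain Object[] arrays that may be over-allocated, so the second argument is
// the logical field count of 'base', not necessarily base.length.
Object[] merged = RowDataUtil.addRowData( base, 2, extra );
// merged now starts with: "id-1", 42L, "constant", Boolean.TRUE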
Example 1
Source File: RowOutputConverter.java    From pentaho-kettle with Apache License 2.0
public Object[] getRow( Object[] baseOutputRow, Object[] rawPartRow, JsonInputData data ) throws KettleException {
  if ( rawPartRow == null ) {
    return null;
  }
  for ( int i = 0; i < rawPartRow.length; i++ ) {
    int outIdx = data.totalpreviousfields + i;
    Object val =
        getValue( data.outputRowMeta.getValueMeta( outIdx ), data.convertRowMeta.getValueMeta( outIdx ),
            rawPartRow[i] );
    rawPartRow[i] = val;
    if ( val == null && data.repeatedFields.get( i ) && data.previousRow != null ) {
      rawPartRow[i] = data.previousRow[outIdx];
    }
  }
  data.previousRow = RowDataUtil.addRowData( baseOutputRow, data.totalpreviousfields, rawPartRow );
  return data.previousRow;
}
 
Example 2
Source File: Constant.java    From pentaho-kettle with Apache License 2.0
@Override
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  Object[] r = getRow();

  if ( r == null ) { // no more rows to be expected from the previous step(s)
    setOutputDone();
    return false;
  }

  if ( data.firstRow ) {
    // The output meta is the original input meta + the
    // additional constant fields.

    data.firstRow = false;
    data.outputMeta = getInputRowMeta().clone();
    meta.getFields( data.outputMeta, getStepname(), null, null, this, repository, metaStore );
  }

  // Add the constant data to the end of the row.
  r = RowDataUtil.addRowData( r, getInputRowMeta().size(), data.getConstants().getData() );

  putRow( data.outputMeta, r );

  if ( log.isRowLevel() ) {
    logRowlevel( BaseMessages.getString(
      PKG, "Constant.Log.Wrote.Row", Long.toString( getLinesWritten() ), getInputRowMeta().getString( r ) ) );
  }

  if ( checkFeedback( getLinesWritten() ) ) {
    if ( log.isBasic() ) {
      logBasic( BaseMessages.getString( PKG, "Constant.Log.LineNr", Long.toString( getLinesWritten() ) ) );
    }
  }

  return true;
}
 
Example 3
Source File: GroupBy.java    From pentaho-kettle with Apache License 2.0
private Object[] buildResult( Object[] r ) throws KettleValueException {
  Object[] result = null;
  if ( r != null || meta.isAlwaysGivingBackOneRow() ) {
    result = RowDataUtil.allocateRowData( data.groupnrs.length );
    if ( r != null ) {
      for ( int i = 0; i < data.groupnrs.length; i++ ) {
        result[ i ] = r[ data.groupnrs[ i ] ];
      }
    }

    result = RowDataUtil.addRowData( result, data.groupnrs.length, getAggregateResult() );
  }

  return result;
}
 
Example 4
Source File: FuzzyMatch.java    From pentaho-kettle with Apache License 2.0
private Object[] lookupValues( RowMetaInterface rowMeta, Object[] row ) throws KettleException {
  if ( first ) {
    first = false;

    data.outputRowMeta = getInputRowMeta().clone();
    meta.getFields(
      data.outputRowMeta, getStepname(), new RowMetaInterface[] { data.infoMeta }, null, this, repository,
      metaStore );

    // Check lookup field
    data.indexOfMainField = getInputRowMeta().indexOfValue( environmentSubstitute( meta.getMainStreamField() ) );
    if ( data.indexOfMainField < 0 ) {
      // The field is unreachable!
      throw new KettleException( BaseMessages.getString( PKG, "FuzzyMatch.Exception.CouldnotFindMainField", meta
        .getMainStreamField() ) );
    }
  }
  Object[] add = null;
  if ( row[ data.indexOfMainField ] == null ) {
    add = buildEmptyRow();
  } else {
    try {
      add = getFromCache( row );
    } catch ( Exception e ) {
      throw new KettleStepException( e );
    }
  }
  return RowDataUtil.addRowData( row, rowMeta.size(), add );
}
 
Example 5
Source File: TableInput.java    From pentaho-kettle with Apache License 2.0
private RowMetaAndData readStartDate() throws KettleException {
  if ( log.isDetailed() ) {
    logDetailed( "Reading from step [" + data.infoStream.getStepname() + "]" );
  }

  RowMetaInterface parametersMeta = new RowMeta();
  Object[] parametersData = new Object[] {};

  RowSet rowSet = findInputRowSet( data.infoStream.getStepname() );
  if ( rowSet != null ) {
    Object[] rowData = getRowFrom( rowSet ); // rows are originating from "lookup_from"
    while ( rowData != null ) {
      parametersData = RowDataUtil.addRowData( parametersData, parametersMeta.size(), rowData );
      parametersMeta.addRowMeta( rowSet.getRowMeta() );

      rowData = getRowFrom( rowSet ); // take all input rows if needed!
    }

    if ( parametersMeta.size() == 0 ) {
      throw new KettleException( "Expected to read parameters from step ["
        + data.infoStream.getStepname() + "] but none were found." );
    }
  } else {
    throw new KettleException( "Unable to find rowset to read from, perhaps step ["
      + data.infoStream.getStepname() + "] doesn't exist. (or perhaps you are trying a preview?)" );
  }

  RowMetaAndData parameters = new RowMetaAndData( parametersMeta, parametersData );

  return parameters;
}
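
The while loop above grows the parameter row by repeated concatenation, using the running metadata size as the append offset. A stripped-down sketch of that accumulation pattern (incomingRows is a hypothetical stand-in for the getRowFrom() loop, not part of the source above):

Object[] accumulated = new Object[] {};
int logicalSize = 0;

for ( Object[] incoming : incomingRows ) {
  // Append the new values after the fields accumulated so far.
  accumulated = RowDataUtil.addRowData( accumulated, logicalSize, incoming );
  logicalSize += incoming.length;
}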
 
Example 6
Source File: XMLInputStream.java    From pentaho-kettle with Apache License 2.0
private void putRowOut( Object[] r ) throws KettleStepException, KettleValueException {

    data.rowNumber++;
    if ( data.pos_xml_filename != -1 ) {
      r[data.pos_xml_filename] = new String( data.filenames[( data.filenr - 1 )] );
    }
    if ( data.pos_xml_row_number != -1 ) {
      r[data.pos_xml_row_number] = new Long( data.rowNumber );
    }
    if ( data.pos_xml_element_id != -1 ) {
      r[data.pos_xml_element_id] = data.elementLevelID[data.elementLevel];
    }
    if ( data.pos_xml_element_level != -1 ) {
      r[data.pos_xml_element_level] = new Long( data.elementLevel );
    }
    if ( data.pos_xml_parent_element_id != -1 ) {
      r[data.pos_xml_parent_element_id] = data.elementParentID[data.elementLevel];
    }
    if ( data.pos_xml_path != -1 ) {
      r[data.pos_xml_path] = data.elementPath[data.elementLevel];
    }
    if ( data.pos_xml_parent_path != -1 && data.elementLevel > 0 ) {
      r[data.pos_xml_parent_path] = data.elementPath[data.elementLevel - 1];
    }

    // We could think of adding an option to filter Start_end Document / Elements, RegEx?
    // We could think of adding columns identifying Element-Blocks

    // Skip rows? (not exact science since some attributes could be mixed within the last row)
    if ( data.nrRowsToSkip == 0 || data.rowNumber > data.nrRowsToSkip ) {
      if ( log.isRowLevel() ) {
        logRowlevel( "Read row: " + data.outputRowMeta.getString( r ) );
      }
      if ( data.currentInputRow != null ) {
        r = RowDataUtil.addRowData( (Object[]) data.currentInputRow.clone(), data.previousFieldsNumber, r );
      }
      putRow( data.finalOutputRowMeta, r );
    }
  }
 
Example 7
Source File: AnalyticQuery.java    From pentaho-kettle with Apache License 2.0
public void processQueueObjectAt( int i ) throws KettleStepException {
  int index = i - 1;
  Object[] rows = data.data.toArray();

  Object[] fields = new Object[meta.getNumberOfFields()];
  for ( int j = 0; j < meta.getNumberOfFields(); j++ ) {
    // field_index is the location inside a row of the subject of this field;
    // i.e., ORDERTOTAL might be the subject of this lag or lead,
    // so we determine ORDERTOTAL's index in the row
    int field_index = data.inputRowMeta.indexOfValue( meta.getSubjectField()[j] );
    int row_index = 0;
    switch ( meta.getAggregateType()[j] ) {
      case AnalyticQueryMeta.TYPE_FUNCT_LAG:
        row_index = index - meta.getValueField()[j];
        break;
      case AnalyticQueryMeta.TYPE_FUNCT_LEAD:
        row_index = index + meta.getValueField()[j];
        break;
      default:
        break;
    }
    if ( row_index < rows.length && row_index >= 0 ) {
      Object[] singleRow = (Object[]) rows[row_index];
      if ( singleRow != null && singleRow[field_index] != null ) {
        fields[j] = singleRow[field_index];
      } else {
        // set default
        fields[j] = null;
      }
    } else {
      // set default
      fields[j] = null;
    }
  }

  Object[] newRow = RowDataUtil.addRowData( (Object[]) rows[index], data.inputRowMeta.size(), fields );

  putRow( data.outputRowMeta, newRow );

}
 
Example 8
Source File: DetectLastRow.java    From pentaho-kettle with Apache License 2.0
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  meta = (DetectLastRowMeta) smi;
  data = (DetectLastRowData) sdi;

  Object[] r = getRow(); // Get row from input rowset & set row busy!

  if ( first ) {
    if ( getInputRowMeta() == null ) {
      setOutputDone();
      return false;
    }

    // get the RowMeta
    data.previousRowMeta = getInputRowMeta().clone();
    data.NrPrevFields = data.previousRowMeta.size();
    data.outputRowMeta = data.previousRowMeta;
    meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
  }
  Object[] outputRow = null;

  if ( r == null ) { // no more input to be expected...

    if ( previousRow != null ) {
      //
      // Output the last row with last row indicator set to true.
      //
      if ( !Utils.isEmpty( meta.getResultFieldName() ) ) {
        outputRow = RowDataUtil.addRowData( previousRow, getInputRowMeta().size(), data.getTrueArray() );
      } else {
        outputRow = previousRow;
      }

      putRow( data.outputRowMeta, outputRow ); // copy row to output rowset(s);

      if ( log.isRowLevel() ) {
        logRowlevel( BaseMessages.getString( PKG, "DetectLastRow.Log.WroteRowToNextStep" )
          + data.outputRowMeta.getString( outputRow ) );
      }

      if ( checkFeedback( getLinesRead() ) ) {
        logBasic( BaseMessages.getString( PKG, "DetectLastRow.Log.LineNumber" ) + getLinesRead() );
      }
    }

    setOutputDone();
    return false;
  }

  if ( !first ) {
    outputRow = RowDataUtil.addRowData( previousRow, getInputRowMeta().size(), data.getFalseArray() );
    putRow( data.outputRowMeta, outputRow ); // copy row to output rowset(s);

    if ( log.isRowLevel() ) {
      logRowlevel( BaseMessages.getString( PKG, "DetectLastRow.Log.WroteRowToNextStep" )
        + data.outputRowMeta.getString( outputRow ) );
    }

    if ( checkFeedback( getLinesRead() ) ) {
      logBasic( BaseMessages.getString( PKG, "DetectLastRow.Log.LineNumber" ) + getLinesRead() );
    }
  }
  // keep track of the current row
  previousRow = r;
  if ( first ) {
    first = false;
  }

  return true;
}
 
Example 9
Source File: NumberRange.java    From pentaho-kettle with Apache License 2.0
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  Object[] row = getRow();
  if ( row == null ) {
    setOutputDone();
    return false;
  }

  if ( first ) {
    first = false;

    numberRange = new NumberRangeSet( meta.getRules(), meta.getFallBackValue() );
    data.outputRowMeta = getInputRowMeta().clone();
    // Prepare output fields
    meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

    // Find column numbers
    data.inputColumnNr = data.outputRowMeta.indexOfValue( meta.getInputField() );

    // Check if a field was not available
    if ( data.inputColumnNr < 0 ) {
      logError( "Field for input could not be found: " + meta.getInputField() );
      return false;
    }
  }
  try {
    // get field value
    Double value = getInputRowMeta().getNumber( row, data.inputColumnNr );

    // return range
    String ranges = numberRange.evaluate( value );
    // add value to output
    row = RowDataUtil.addRowData( row, getInputRowMeta().size(), new Object[] { ranges } );
    putRow( data.outputRowMeta, row );
    if ( checkFeedback( getLinesRead() ) ) {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "NumberRange.Log.LineNumber" ) + getLinesRead() );
      }
    }
  } catch ( KettleException e ) {
    boolean sendToErrorRow = false;
    String errorMessage = null;

    if ( getStepMeta().isDoingErrorHandling() ) {
      sendToErrorRow = true;
      errorMessage = e.toString();
    } else {
      logError( BaseMessages.getString( PKG, "NumberRange.Log.ErrorInStepRunning" ) + e.getMessage() );
      setErrors( 1 );
      stopAll();
      setOutputDone(); // signal end to receiver(s)
      return false;
    }
    if ( sendToErrorRow ) {
      // Simply add this row to the error row
      putError( getInputRowMeta(), row, 1, errorMessage, null, "NumberRange001" );
    }
  }

  return true;
}
 
Example 10
Source File: YamlInput.java    From pentaho-kettle with Apache License 2.0
private Object[] getRowData() throws KettleException {
  // Build an empty row based on the meta-data
  Object[] outputRowData = null;

  try {
    // Create new row...
    outputRowData = data.yaml.getRow( data.rowMeta );
    if ( outputRowData == null ) {
      return null;
    }

    if ( data.readrow != null ) {
      outputRowData = RowDataUtil.addRowData( data.readrow, data.totalPreviousFields, outputRowData );
    } else {
      outputRowData = RowDataUtil.resizeArray( outputRowData, data.totalOutStreamFields );
    }

    int rowIndex = data.totalOutFields;

    // See if we need to add the filename to the row...
    if ( meta.includeFilename() && !Utils.isEmpty( meta.getFilenameField() ) ) {
      outputRowData[rowIndex++] = KettleVFS.getFilename( data.file );
    }
    // See if we need to add the row number to the row...
    if ( meta.includeRowNumber() && !Utils.isEmpty( meta.getRowNumberField() ) ) {
      outputRowData[rowIndex++] = new Long( data.rownr );
    }

  } catch ( Exception e ) {
    boolean sendToErrorRow = false;
    String errorMessage = null;

    if ( getStepMeta().isDoingErrorHandling() ) {
      sendToErrorRow = true;
      errorMessage = e.toString();
    } else {
      logError( BaseMessages.getString( PKG, "YamlInput.ErrorInStepRunning", e.toString() ) );
      setErrors( 1 );
      stopAll();
      logError( Const.getStackTracker( e ) );
      setOutputDone(); // signal end to receiver(s)
    }
    if ( sendToErrorRow ) {
      // Simply add this row to the error row
      putError( getInputRowMeta(), outputRowData, 1, errorMessage, null, "YamlInput001" );
    }
  }

  return outputRowData;
}
 
Example 11
Source File: GroupBy.java    From pentaho-kettle with Apache License 2.0
private void handleLastOfGroup() throws KettleException {
  if ( meta.passAllRows() ) {
    // ALL ROWS

    if ( data.previous != null ) {
      calcAggregate( data.previous );
      addToBuffer( data.previous );
    }
    data.groupResult = getAggregateResult();

    Object[] row = getRowFromBuffer();

    long lineNr = 0;
    while ( row != null ) {
      int size = data.inputRowMeta.size();
      row = RowDataUtil.addRowData( row, size, data.groupResult );
      size += data.groupResult.length;
      lineNr++;

      if ( meta.isAddingLineNrInGroup() && !Utils.isEmpty( meta.getLineNrInGroupField() ) ) {
        Object lineNrValue = new Long( lineNr );
        // ValueMetaInterface lineNrValueMeta = new ValueMeta(meta.getLineNrInGroupField(),
        // ValueMetaInterface.TYPE_INTEGER);
        // lineNrValueMeta.setLength(9);
        row = RowDataUtil.addValueData( row, size, lineNrValue );
        size++;
      }

      addCumulativeSums( row );
      addCumulativeAverages( row );

      putRow( data.outputRowMeta, row );
      row = getRowFromBuffer();
    }
    closeInput();
  } else {
    // JUST THE GROUP + AGGREGATE

    // Don't forget the last set of rows...
    if ( data.previous != null ) {
      calcAggregate( data.previous );
    }
    Object[] result = buildResult( data.previous );
    if ( result != null ) {
      putRow( data.groupAggMeta, result );
    }
  }
}
 
Example 12
Source File: SecretKeyGenerator.java    From pentaho-kettle with Apache License 2.0
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {

    Object[] row;
    Object[] rowIn = null;

    if ( data.readsRows ) {
      rowIn = getRow();
      if ( rowIn == null ) {
        setOutputDone();
        return false;
      }

      if ( first ) {
        first = false;
        data.prevNrField = getInputRowMeta().size();
        data.outputRowMeta = getInputRowMeta().clone();
        meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
      }

    } else {

      if ( first ) {
        first = false;
        data.outputRowMeta = new RowMeta();
        meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
      }
    }
    for ( int i = 0; i < data.nr && !isStopped(); i++ ) {

      for ( int j = 0; j < data.secretKeyCount[i] && !isStopped(); j++ ) {

        // Create a new row
        row = buildEmptyRow();
        incrementLinesRead();

        int index = 0;

        try {
          // Return secret key
          if ( meta.isOutputKeyInBinary() ) {
            row[index++] = data.cryptoTrans[i].generateKey( data.secretKeyLen[i] );
          } else {
            row[index++] = data.cryptoTrans[i].generateKeyAsHex( data.secretKeyLen[i] );
          }

        } catch ( CryptoException k ) {
          throw new KettleException( BaseMessages.getString( PKG, "SecretKeyGenerator.KeyGenerationError", i ), k );
        }

        if ( data.addAlgorithmOutput ) {
          // add algorithm
          row[index++] = meta.getAlgorithm()[i];
        }

        if ( data.addSecretKeyLengthOutput ) {
          // add secret key len
          row[index++] = new Long( data.secretKeyLen[i] );
        }

        if ( data.readsRows ) {
          // build output row
          row = RowDataUtil.addRowData( rowIn, data.prevNrField, row );
        }

        if ( isRowLevel() ) {
          logRowlevel( BaseMessages.getString( PKG, "SecretKeyGenerator.Log.ValueReturned", data.outputRowMeta
            .getString( row ) ) );
        }

        putRow( data.outputRowMeta, row );
      }
    }

    setOutputDone();
    return false;
  }
 
Example 13
Source File: GetVariable.java    From pentaho-kettle with Apache License 2.0
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  Object[] rowData;

  if ( data.readsRows ) {
    rowData = getRow();
    if ( rowData == null ) {
      setOutputDone();
      return false;
    }
  } else {
    rowData = RowDataUtil.allocateRowData( 0 );
    incrementLinesRead();
  }

  // initialize
  if ( first && rowData != null ) {
    first = false;

    // Make output meta data
    //
    if ( data.readsRows ) {
      data.inputRowMeta = getInputRowMeta();
    } else {
      data.inputRowMeta = new RowMeta();
    }
    data.outputRowMeta = data.inputRowMeta.clone();
    meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

    // Create a copy of the output row metadata to do the data conversion...
    //
    data.conversionMeta = data.outputRowMeta.cloneToType( ValueMetaInterface.TYPE_STRING );

    // Add the variables to the row...
    //
    // Keep the Object[] for speed. Although this step is typically used on "small"
    // amounts of data, there are always cases where performance is required.
    //
    int fieldsLength = meta.getFieldDefinitions().length;
    data.extraData = new Object[fieldsLength];
    for ( int i = 0; i < fieldsLength; i++ ) {
      String newValue = environmentSubstitute( meta.getFieldDefinitions()[i].getVariableString() );
      if ( log.isDetailed() ) {
        logDetailed( "field [" + meta.getFieldDefinitions()[i].getFieldName() + "] has value [" + newValue + "]" );
      }

      // Convert the data to the desired data type...
      //
      ValueMetaInterface targetMeta = data.outputRowMeta.getValueMeta( data.inputRowMeta.size() + i );
      // The source metadata is the String-typed conversion copy: the same field, plus
      // conversion masks, symbols, trim type, etc.
      ValueMetaInterface sourceMeta = data.conversionMeta.getValueMeta( data.inputRowMeta.size() + i );
      data.extraData[i] = targetMeta.convertData( sourceMeta, newValue );
    }
  }

  rowData = RowDataUtil.addRowData( rowData, data.inputRowMeta.size(), data.extraData );

  putRow( data.outputRowMeta, rowData );

  if ( !data.readsRows ) { // Just one row and then stop!

    setOutputDone();
    return false;
  }

  return true;
}
 
Example 14
Source File: RowMetaAndData.java    From pentaho-kettle with Apache License 2.0
public void mergeRowMetaAndData( RowMetaAndData rowMetaAndData, String originStepName ) {
  int originalMetaSize = rowMeta.size();
  rowMeta.mergeRowMeta( rowMetaAndData.getRowMeta(), originStepName );
  data = RowDataUtil.addRowData( data, originalMetaSize, rowMetaAndData.getData() );
}
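
A hedged usage sketch of the merge above, assuming the standard Kettle row API (RowMeta, ValueMetaString, and the addValue() helper); the field names and values are invented for illustration:

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaAndData;
import org.pentaho.di.core.row.value.ValueMetaString;

RowMetaAndData left = new RowMetaAndData( new RowMeta(), new Object[0] );
left.addValue( new ValueMetaString( "name" ), "alice" );

RowMetaAndData right = new RowMetaAndData( new RowMeta(), new Object[0] );
right.addValue( new ValueMetaString( "city" ), "Ghent" );

// Appends right's data after left's original fields via RowDataUtil.addRowData()
// and merges the metadata, tagging it with the given origin step name.
left.mergeRowMetaAndData( right, "merge-step" );
// left now carries both fields: name and city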
 
Example 15
Source File: KafkaConsumer.java    From pentaho-kafka-consumer with Apache License 2.0
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    Object[] r = getRow();
    if (r == null) {
        /*
         * If we have no input rows, make sure we at least run once to
         * produce output rows. This allows us to consume without requiring
         * an input step.
         */
        if (!first) {
            setOutputDone();
            return false;
        }
        r = new Object[0];
    } else {
        incrementLinesRead();
    }

    final Object[] inputRow = r;

    KafkaConsumerMeta meta = (KafkaConsumerMeta) smi;
    final KafkaConsumerData data = (KafkaConsumerData) sdi;

    if (first) {
        first = false;
        data.inputRowMeta = getInputRowMeta();
        // No input row metadata means we fall back to empty dummy metadata
        if (data.inputRowMeta == null) {
            data.outputRowMeta = new RowMeta();
            data.inputRowMeta = new RowMeta();
        } else {
            data.outputRowMeta = getInputRowMeta().clone();
        }
        meta.getFields(data.outputRowMeta, getStepname(), null, null, this, null, null);
    }

    try {
        long timeout;
        String strData = meta.getTimeout();

        timeout = getTimeout(strData);

        logDebug("Starting message consumption with overall timeout of " + timeout + "ms");

        KafkaConsumerCallable kafkaConsumer = new KafkaConsumerCallable(meta, data, this) {
            protected void messageReceived(byte[] key, byte[] message) throws KettleException {
                Object[] newRow = RowDataUtil.addRowData(inputRow.clone(), data.inputRowMeta.size(),
                        new Object[]{message, key});
                putRow(data.outputRowMeta, newRow);

                if (isRowLevel()) {
                    logRowlevel(Messages.getString("KafkaConsumer.Log.OutputRow",
                            Long.toString(getLinesWritten()), data.outputRowMeta.getString(newRow)));
                }
            }
        };
        if (timeout > 0) {
            logDebug("Starting timed consumption");
            ExecutorService executor = Executors.newSingleThreadExecutor();
            try {
                Future<?> future = executor.submit(kafkaConsumer);
                executeFuture(timeout, future);
            } finally {
                executor.shutdown();
            }
        } else {
            logDebug("Starting direct consumption");
            kafkaConsumer.call();
        }
    } catch (KettleException e) {
        if (!getStepMeta().isDoingErrorHandling()) {
            logError(Messages.getString("KafkaConsumer.ErrorInStepRunning", e.getMessage()));
            setErrors(1);
            stopAll();
            setOutputDone();
            return false;
        }
        putError(getInputRowMeta(), r, 1, e.toString(), null, getStepname());
    }
    return true;
}