Java Code Examples for org.pentaho.di.trans.step.StepMeta#getName()

The following examples show how to use org.pentaho.di.trans.step.StepMeta#getName(). Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
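
For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below) of what StepMeta#getName() returns: the step's display name, which is unique within a transformation and can therefore be used as a lookup key. The transformation path and the step name "Table output" are placeholders for this sketch.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepMeta;

public class StepNameSketch {
  public static void main( String[] args ) throws Exception {
    KettleEnvironment.init();
    TransMeta transMeta = new TransMeta( "/path/to/transformation.ktr" ); // placeholder path

    // getName() returns the step's display name as shown in the transformation graph.
    for ( StepMeta stepMeta : transMeta.getSteps() ) {
      System.out.println( "Step: " + stepMeta.getName() );
    }

    // Step names are unique within a transformation, so they work as lookup keys.
    StepMeta found = transMeta.findStep( "Table output" ); // hypothetical step name
    if ( found != null ) {
      System.out.println( "Found: " + found.getName() );
    }
  }
}
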
Example 1
Source File: DeleteMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    // Lookup: we do a lookup on the natural keys
    for ( int i = 0; i < keyLookup.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( keyStream[i] );

      // Guard both uses of v: searchValueMeta() returns null when the stream field is missing.
      DatabaseImpact ii =
        new DatabaseImpact(
          DatabaseImpact.TYPE_IMPACT_DELETE, transMeta.getName(), stepMeta.getName(), databaseMeta
            .getDatabaseName(), tableName, keyLookup[i], keyStream[i],
          v != null ? v.getOrigin() : "?", "", v != null ? "Type = " + v.toStringMeta() : "?" );
      impact.add( ii );
    }
  }
}
 
Example 2
Source File: PGBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      // Guard both uses of v: searchValueMeta() returns null when the stream field is missing.
      DatabaseImpact ii =
        new DatabaseImpact(
          DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
            .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", v != null ? "Type = " + v.toStringMeta() : "?" );
      impact.add( ii );
    }
  }
}
 
Example 3
Source File: GPBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      // Guard both uses of v: searchValueMeta() returns null when the stream field is missing.
      DatabaseImpact ii =
        new DatabaseImpact(
          DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
            .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", v != null ? "Type = " + v.toStringMeta() : "?" );
      impact.add( ii );
    }
  }
}
 
Example 4
Source File: MySQLBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      // Guard both uses of v: searchValueMeta() returns null when the stream field is missing.
      DatabaseImpact ii =
          new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(),
              databaseMeta.getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
              fieldStream[i], v != null ? v.getOrigin() : "?", "", v != null ? "Type = " + v.toStringMeta() : "?" );
      impact.add( ii );
    }
  }
}
 
Example 5
Source File: MonetDBBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      // Guard both uses of v: searchValueMeta() returns null when the stream field is missing.
      DatabaseImpact ii =
          new DatabaseImpact(
              DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
              .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
              fieldStream[i], v != null ? v.getOrigin() : "?", "", v != null ? "Type = " + v.toStringMeta() : "?" );
      impact.add( ii );
    }
  }
}
 
Example 6
Source File: SpoonStepsDelegate.java    From pentaho-kettle with Apache License 2.0
public void dupeStep( TransMeta transMeta, StepMeta stepMeta ) {
  spoon.getLog().logDebug(
    toString(), BaseMessages.getString( PKG, "Spoon.Log.DuplicateStep" ) + stepMeta.getName() ); // Duplicate step

  StepMeta stMeta = (StepMeta) stepMeta.clone();
  if ( stMeta != null ) {
    String newname = transMeta.getAlternativeStepname( stepMeta.getName() );
    int nr = 2;
    while ( transMeta.findStep( newname ) != null ) {
      newname = stepMeta.getName() + " (copy " + nr + ")";
      nr++;
    }
    stMeta.setName( newname );
    // Don't select this new step!
    stMeta.setSelected( false );
    Point loc = stMeta.getLocation();
    stMeta.setLocation( loc.x + 20, loc.y + 20 );
    transMeta.addStep( stMeta );
    spoon.addUndoNew( transMeta, new StepMeta[] { (StepMeta) stMeta.clone() }, new int[] { transMeta
      .indexOfStep( stMeta ) } );
    spoon.refreshTree();
    spoon.refreshGraph();
  }
}
 
Example 7
Source File: SingleThreaderDialog.java    From pentaho-kettle with Apache License 2.0
public static String getInjectorStep( TransMeta mappingTransMeta ) {
  for ( StepMeta stepMeta : mappingTransMeta.getSteps() ) {
    if ( stepMeta.getStepID().equals( "Injector" ) || stepMeta.getStepID().equals( "MappingInput" ) ) {
      return stepMeta.getName();
    }
  }
  return "";
}
 
Example 8
Source File: SparkTuningStepHandler.java    From pentaho-kettle with Apache License 2.0
@SuppressWarnings( "squid:S1181" )
public void openSparkTuning() {
  TransGraph transGraph = Spoon.getInstance().getActiveTransGraph();
  StepMeta stepMeta = transGraph.getCurrentStep();
  String title = BaseMessages.getString( PKG, "TransGraph.Dialog.SparkTuning.Title" )
    + " - " + stepMeta.getName();

  List<String> tuningProperties = SparkTunableProperties.getProperties( stepMeta.getStepID() );

  PropertiesComboDialog dialog = new PropertiesComboDialog(
    transGraph.getParent().getShell(),
    transGraph.getTransMeta(),
    stepMeta.getAttributes( SPARK_TUNING_PROPERTIES ),
    title,
    Const.getDocUrl( BaseMessages.getString( PKG, "SparkTuning.Help.Url" ) ),
    BaseMessages.getString( PKG, "SparkTuning.Help.Title" ),
    BaseMessages.getString( PKG, "SparkTuning.Help.Header" )
  );
  dialog.setComboOptions( tuningProperties );
  try {
    Map<String, String> properties = dialog.open();

    // null means the cancel button was clicked otherwise ok was clicked
    if ( null != properties ) {
      stepMeta.setAttributes( SPARK_TUNING_PROPERTIES, properties );
      stepMeta.setChanged();
      transGraph.getSpoon().setShellText();
    }
  } catch ( Throwable e ) {
    new ErrorDialog(
      Spoon.getInstance().getShell(), BaseMessages.getString( PKG, "SparkTuning.UnexpectedError" ), BaseMessages
      .getString( PKG, "SparkTuning.UnexpectedError" ), e );
  }
}
 
Example 9
Source File: DataSetHelper.java    From pentaho-pdi-dataset with Apache License 2.0
private void tweakUnitTestStep( TransTweak stepTweak, boolean enable ) {
  Spoon spoon = ( (Spoon) SpoonFactory.getInstance() );
  TransGraph transGraph = spoon.getActiveTransGraph();
  IMetaStore metaStore = spoon.getMetaStore();
  if ( transGraph == null ) {
    return;
  }
  StepMeta stepMeta = transGraph.getCurrentStep();
  TransMeta transMeta = spoon.getActiveTransformation();
  if ( stepMeta == null || transMeta == null ) {
    return;
  }
  if ( checkTestPresent( spoon, transMeta ) ) {
    return;
  }

  try {
    TransUnitTest unitTest = getCurrentUnitTest( transMeta );
    TransUnitTestTweak unitTestTweak = unitTest.findTweak( stepMeta.getName() );
    if ( unitTestTweak != null ) {
      unitTest.getTweaks().remove( unitTestTweak );
    }
    if ( enable ) {
      unitTest.getTweaks().add( new TransUnitTestTweak( stepTweak, stepMeta.getName() ) );
    }

    saveUnitTest( getHierarchy().getTestFactory(), unitTest, transMeta );

    spoon.refreshGraph();

  } catch ( Exception exception ) {
    new ErrorDialog( spoon.getShell(), "Error", "Error tweaking transformation unit test on step '" + stepMeta.getName() + "' with operation " + stepTweak.name(), exception );
  }
}
 
Example 10
Source File: BeamBigQueryOutputStepHandler.java    From kettle-beam with Apache License 2.0
@Override public void handleStep( LogChannelInterface log, StepMeta beamOutputStepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input  ) throws KettleException {

  BeamBQOutputMeta beamOutputMeta = (BeamBQOutputMeta) beamOutputStepMeta.getStepMetaInterface();

  BeamBQOutputTransform beamOutputTransform = new BeamBQOutputTransform(
    beamOutputStepMeta.getName(),
    transMeta.environmentSubstitute( beamOutputMeta.getProjectId() ),
    transMeta.environmentSubstitute( beamOutputMeta.getDatasetId() ),
    transMeta.environmentSubstitute( beamOutputMeta.getTableId() ),
    beamOutputMeta.isCreatingIfNeeded(),
    beamOutputMeta.isTruncatingTable(),
    beamOutputMeta.isFailingIfNotEmpty(),
    JsonRowMeta.toJson( rowMeta ),
    stepPluginClasses,
    xpPluginClasses
  );

  // Which step do we apply this transform to?
  // Ignore info hops until we figure that out.
  //
  if ( previousSteps.size() > 1 ) {
    throw new KettleException( "Combining data from multiple steps is not supported yet!" );
  }
  StepMeta previousStep = previousSteps.get( 0 );

  // No need to store this, it's PDone.
  //
  input.apply( beamOutputTransform );
  log.logBasic( "Handled step (BQ OUTPUT) : " + beamOutputStepMeta.getName() + ", gets data from " + previousStep.getName() );
}
 
Example 11
Source File: BeamSubscriberStepHandler.java    From kettle-beam with Apache License 2.0
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  // A Beam subscriber step
  //
  BeamSubscribeMeta inputMeta = (BeamSubscribeMeta) stepMeta.getStepMetaInterface();

  RowMetaInterface outputRowMeta = transMeta.getStepFields( stepMeta );
  String rowMetaJson = JsonRowMeta.toJson( outputRowMeta );

  // Verify some things:
  //
  if ( StringUtils.isEmpty( inputMeta.getTopic() ) ) {
    throw new KettleException( "Please specify a topic to read from in Beam Pub/Sub Subscribe step '" + stepMeta.getName() + "'" );
  }

  BeamSubscribeTransform subscribeTransform = new BeamSubscribeTransform(
    stepMeta.getName(),
    stepMeta.getName(),
    transMeta.environmentSubstitute( inputMeta.getSubscription() ),
    transMeta.environmentSubstitute( inputMeta.getTopic() ),
    inputMeta.getMessageType(),
    rowMetaJson,
    stepPluginClasses,
    xpPluginClasses
  );

  PCollection<KettleRow> afterInput = pipeline.apply( subscribeTransform );
  stepCollectionMap.put( stepMeta.getName(), afterInput );

  log.logBasic( "Handled step (SUBSCRIBE) : " + stepMeta.getName() );
}
 
Example 12
Source File: BeamGroupByStepHandler.java    From kettle-beam with Apache License 2.0
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  MemoryGroupByMeta groupByMeta = (MemoryGroupByMeta) stepMeta.getStepMetaInterface();

  String[] aggregates = new String[ groupByMeta.getAggregateType().length ];
  for ( int i = 0; i < aggregates.length; i++ ) {
    aggregates[ i ] = MemoryGroupByMeta.getTypeDesc( groupByMeta.getAggregateType()[ i ] );
  }

  PTransform<PCollection<KettleRow>, PCollection<KettleRow>> stepTransform = new GroupByTransform(
    stepMeta.getName(),
    JsonRowMeta.toJson( rowMeta ),  // The input row
    stepPluginClasses,
    xpPluginClasses,
    groupByMeta.getGroupField(),
    groupByMeta.getSubjectField(),
    aggregates,
    groupByMeta.getAggregateField()
  );

  // Apply the step transform to the previous step's PCollection(s)
  //
  PCollection<KettleRow> stepPCollection = input.apply( stepMeta.getName(), stepTransform );

  // Save this in the map
  //
  stepCollectionMap.put( stepMeta.getName(), stepPCollection );
  log.logBasic( "Handled Group By (STEP) : " + stepMeta.getName() + ", gets data from " + previousSteps.size() + " previous step(s)" );
}
 
Example 13
Source File: StepsMetricsDialog.java    From pentaho-kettle with Apache License 2.0
private void getInfo( StepsMetricsMeta in ) {
  stepname = wStepname.getText(); // return value
  int nrsteps = wFields.nrNonEmpty();
  in.allocate( nrsteps );
  for ( int i = 0; i < nrsteps; i++ ) {
    TableItem ti = wFields.getNonEmpty( i );
    StepMeta tm = transMeta.findStep( ti.getText( 1 ) );
    //CHECKSTYLE:Indentation:OFF
    if ( tm != null ) {
      in.getStepName()[i] = tm.getName();
      in.getStepCopyNr()[i] = "" + Const.toInt( ti.getText( 2 ), 0 );
      in.getStepRequired()[i] = in.getRequiredStepsCode( ti.getText( 3 ) );
    }

  }

  in.setStepNameFieldName( wStepnameField.getText() );
  in.setStepIdFieldName( wStepidField.getText() );
  in.setStepLinesInputFieldName( wLinesinputField.getText() );
  in.setStepLinesOutputFieldName( wLinesoutputField.getText() );
  in.setStepLinesReadFieldName( wLinesreadField.getText() );
  in.setStepLinesWrittenFieldName( wLineswrittenField.getText() );
  in.setStepLinesUpdatedFieldName( wLinesupdatedField.getText() );
  in.setStepLinesErrorsFieldName( wLineserrorsField.getText() );
  in.setStepSecondsFieldName( wSecondsField.getText() );

}
 
Example 14
Source File: BeamOutputStepHandler.java    From kettle-beam with Apache License 2.0
@Override public void handleStep( LogChannelInterface log, StepMeta beamOutputStepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  BeamOutputMeta beamOutputMeta = (BeamOutputMeta) beamOutputStepMeta.getStepMetaInterface();
  FileDefinition outputFileDefinition;
  if ( StringUtils.isEmpty( beamOutputMeta.getFileDescriptionName() ) ) {
    // Create a default file definition using standard output and sane defaults...
    //
    outputFileDefinition = getDefaultFileDefition( beamOutputStepMeta );
  } else {
    outputFileDefinition = beamOutputMeta.loadFileDefinition( metaStore );
  }

  // Validate before use: loadFileDefinition() may not find a definition.
  //
  if ( outputFileDefinition == null ) {
    throw new KettleException( "We couldn't find or load the Beam Output step file definition" );
  }
  if ( rowMeta == null || rowMeta.isEmpty() ) {
    throw new KettleException( "No output fields found in the file definition or from previous steps" );
  }

  // Empty file definition? Add all fields in the output
  //
  addAllFieldsToEmptyFileDefinition( rowMeta, outputFileDefinition );

  // Apply the output transform from KettleRow to PDone
  //

  BeamOutputTransform beamOutputTransform = new BeamOutputTransform(
    beamOutputStepMeta.getName(),
    transMeta.environmentSubstitute( beamOutputMeta.getOutputLocation() ),
    transMeta.environmentSubstitute( beamOutputMeta.getFilePrefix() ),
    transMeta.environmentSubstitute( beamOutputMeta.getFileSuffix() ),
    transMeta.environmentSubstitute( outputFileDefinition.getSeparator() ),
    transMeta.environmentSubstitute( outputFileDefinition.getEnclosure() ),
    beamOutputMeta.isWindowed(),
    JsonRowMeta.toJson( rowMeta ),
    stepPluginClasses,
    xpPluginClasses
  );

  // Which step do we apply this transform to?
  // Ignore info hops until we figure that out.
  //
  if ( previousSteps.size() > 1 ) {
    throw new KettleException( "Combining data from multiple steps is not supported yet!" );
  }
  StepMeta previousStep = previousSteps.get( 0 );

  // No need to store this, it's PDone.
  //
  input.apply( beamOutputTransform );
  log.logBasic( "Handled step (OUTPUT) : " + beamOutputStepMeta.getName() + ", gets data from " + previousStep.getName() );
}
 
Example 15
Source File: IngresVectorwiseLoaderMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
  Repository repository, IMetaStore metaStore ) {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      if ( !Utils.isEmpty( tablename ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
          String cr_table = db.getDDL( schemaTable, prev );

          // Squeeze in the VECTORWISE col store clause...
          // TODO: move this to the database dialog and make it user
          // configurable.
          //
          String VW_CLAUSE = "WITH STRUCTURE=VECTORWISE";

          if ( cr_table.toUpperCase().contains( "CREATE TABLE" ) ) {
            int scIndex = cr_table.indexOf( ';' );
            if ( scIndex < 0 ) {
              cr_table += VW_CLAUSE;
            } else {
              cr_table = cr_table.substring( 0, scIndex ) + VW_CLAUSE + cr_table.substring( scIndex );
            }
          }

          // Empty string means: nothing to do: set it to null...
          if ( cr_table == null || cr_table.length() == 0 ) {
            cr_table = null;
          }

          retval.setSQL( cr_table );
        } catch ( KettleDatabaseException dbe ) {
          retval.setError( BaseMessages.getString( PKG, "IngresVectorWiseLoaderMeta.Error.ErrorConnecting", dbe
            .getMessage() ) );
        } finally {
          db.disconnect();
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "IngresVectorWiseLoaderMeta.Error.NoTable" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "IngresVectorWiseLoaderMeta.Error.NoInput" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "IngresVectorWiseLoaderMeta.Error.NoConnection" ) );
  }

  return retval;
}
 
Example 16
Source File: DeleteMeta.java    From pentaho-kettle with Apache License 2.0
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
  Repository repository, IMetaStore metaStore ) {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      if ( !Utils.isEmpty( tableName ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable = databaseMeta.getQuotedSchemaTableCombination( schemaName, tableName );
          String cr_table = db.getDDL( schemaTable, prev, null, false, null, true );

          String cr_index = "";
          String[] idx_fields = null;

          if ( keyLookup != null && keyLookup.length > 0 ) {
            idx_fields = new String[keyLookup.length];
            for ( int i = 0; i < keyLookup.length; i++ ) {
              idx_fields[i] = keyLookup[i];
            }
          } else {
            retval.setError( BaseMessages.getString( PKG, "DeleteMeta.CheckResult.KeyFieldsRequired" ) );
          }

          // Key lookup dimensions...
          if ( idx_fields != null && idx_fields.length > 0 && !db.checkIndexExists( schemaTable, idx_fields ) ) {
            String indexname = "idx_" + tableName + "_lookup";
            cr_index =
              db.getCreateIndexStatement(
                schemaName, tableName, indexname, idx_fields, false, false, false, true );
          }

          String sql = cr_table + cr_index;
          if ( sql.length() == 0 ) {
            retval.setSQL( null );
          } else {
            retval.setSQL( sql );
          }
        } catch ( KettleException e ) {
          retval.setError( BaseMessages.getString( PKG, "DeleteMeta.Returnvalue.ErrorOccurred" )
            + e.getMessage() );
        } finally {
          db.disconnect(); // always release the connection
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "DeleteMeta.Returnvalue.NoTableDefinedOnConnection" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "DeleteMeta.Returnvalue.NoReceivingAnyFields" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "DeleteMeta.Returnvalue.NoConnectionDefined" ) );
  }

  return retval;
}
 
Example 17
Source File: Trans.java    From pentaho-kettle with Apache License 2.0
/**
 * Adds a step performance snapshot.
 */
protected void addStepPerformanceSnapShot() {

  if ( stepPerformanceSnapShots == null ) {
    return; // Race condition somewhere?
  }

  boolean pausedAndNotEmpty = isPaused() && !stepPerformanceSnapShots.isEmpty();
  boolean stoppedAndNotEmpty = isStopped() && !stepPerformanceSnapShots.isEmpty();

  if ( transMeta.isCapturingStepPerformanceSnapShots() && !pausedAndNotEmpty && !stoppedAndNotEmpty ) {
    // get the statistics from the steps and keep them...
    //
    int seqNr = stepPerformanceSnapshotSeqNr.incrementAndGet();
    for ( int i = 0; i < steps.size(); i++ ) {
      StepMeta stepMeta = steps.get( i ).stepMeta;
      StepInterface step = steps.get( i ).step;

      StepPerformanceSnapShot snapShot =
        new StepPerformanceSnapShot( seqNr, getBatchId(), new Date(), getName(), stepMeta.getName(), step.getCopy(),
          step.getLinesRead(), step.getLinesWritten(), step.getLinesInput(), step.getLinesOutput(), step
          .getLinesUpdated(), step.getLinesRejected(), step.getErrors() );

      synchronized ( stepPerformanceSnapShots ) {
        List<StepPerformanceSnapShot> snapShotList = stepPerformanceSnapShots.get( step.toString() );
        StepPerformanceSnapShot previous;
        if ( snapShotList == null ) {
          snapShotList = new ArrayList<>();
          stepPerformanceSnapShots.put( step.toString(), snapShotList );
          previous = null;
        } else {
          previous = snapShotList.get( snapShotList.size() - 1 ); // the last one...
        }
        // Make the difference...
        //
        snapShot.diff( previous, step.rowsetInputSize(), step.rowsetOutputSize() );
        snapShotList.add( snapShot );

        if ( stepPerformanceSnapshotSizeLimit > 0 && snapShotList.size() > stepPerformanceSnapshotSizeLimit ) {
          snapShotList.remove( 0 );
        }
      }
    }

    lastStepPerformanceSnapshotSeqNrAdded = stepPerformanceSnapshotSeqNr.get();
  }
}
 
Example 18
Source File: LucidDBBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
  Repository repository, IMetaStore metaStore ) throws KettleStepException {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      // Copy the row
      RowMetaInterface tableFields = new RowMeta();

      // Now change the field names
      for ( int i = 0; i < fieldTable.length; i++ ) {
        ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );
        if ( v != null ) {
          ValueMetaInterface tableField = v.clone();
          tableField.setName( fieldTable[i] );
          tableFields.addValueMeta( tableField );
        } else {
          throw new KettleStepException( "Unable to find field [" + fieldStream[i] + "] in the input rows" );
        }
      }

      if ( !Utils.isEmpty( tableName ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable =
            databaseMeta.getQuotedSchemaTableCombination(
              transMeta.environmentSubstitute( schemaName ), transMeta.environmentSubstitute( tableName ) );
          String sql = db.getDDL( schemaTable, tableFields, null, false, null, true );

          if ( Utils.isEmpty( sql ) ) {
            retval.setSQL( null );
          } else {
            retval.setSQL( sql );
          }
        } catch ( KettleException e ) {
          retval.setError( BaseMessages.getString( PKG, "LucidDBBulkLoaderMeta.GetSQL.ErrorOccurred" )
            + e.getMessage() );
        } finally {
          db.disconnect(); // always release the connection
        }
      } else {
        retval
          .setError( BaseMessages.getString( PKG, "LucidDBBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "LucidDBBulkLoaderMeta.GetSQL.NotReceivingAnyFields" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "LucidDBBulkLoaderMeta.GetSQL.NoConnectionDefined" ) );
  }

  return retval;
}
 
Example 19
Source File: UpdateMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
  Repository repository, IMetaStore metaStore ) throws KettleStepException  {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      // Copy the row
      RowMetaInterface tableFields = RowMetaUtils.getRowMetaForUpdate( prev, keyLookup, keyStream,
          updateLookup, updateStream );
      if ( !Utils.isEmpty( tableName ) ) {
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination( schemaName, tableName );

        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          if ( getIgnoreFlagField() != null && getIgnoreFlagField().length() > 0 ) {
            prev.addValueMeta( new ValueMetaBoolean( getIgnoreFlagField() ) );
          }

          String cr_table = db.getDDL( schemaTable, tableFields, null, false, null, true );

          String cr_index = "";
          String[] idx_fields = null;

          if ( keyLookup != null && keyLookup.length > 0 ) {
            idx_fields = new String[keyLookup.length];
            for ( int i = 0; i < keyLookup.length; i++ ) {
              idx_fields[i] = keyLookup[i];
            }
          } else {
            retval.setError( BaseMessages.getString( PKG, "UpdateMeta.CheckResult.MissingKeyFields" ) );
          }

          // Key lookup dimensions...
          if ( idx_fields != null
            && idx_fields.length > 0 && !db.checkIndexExists( schemaTable, idx_fields ) ) {
            String indexname = "idx_" + tableName + "_lookup";
            cr_index =
              db.getCreateIndexStatement(
                schemaTable, indexname, idx_fields, false, false, false, true );
          }

          String sql = cr_table + cr_index;
          if ( sql.length() == 0 ) {
            retval.setSQL( null );
          } else {
            retval.setSQL( sql );
          }
        } catch ( KettleException e ) {
          retval.setError( BaseMessages.getString( PKG, "UpdateMeta.ReturnValue.ErrorOccurred" )
            + e.getMessage() );
        } finally {
          db.disconnect(); // always release the connection
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "UpdateMeta.ReturnValue.NoTableDefinedOnConnection" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "UpdateMeta.ReturnValue.NotReceivingAnyFields" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "UpdateMeta.ReturnValue.NoConnectionDefined" ) );
  }

  return retval;
}
 
Example 20
Source File: BeamKafkaInputStepHandler.java    From kettle-beam with Apache License 2.0 4 votes vote down vote up
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  // Input handling
  //
  BeamConsumeMeta beamConsumeMeta = (BeamConsumeMeta) stepMeta.getStepMetaInterface();

  // Output rows (fields selection)
  //
  RowMetaInterface outputRowMeta = new RowMeta();
  beamConsumeMeta.getFields( outputRowMeta, stepMeta.getName(), null, null, transMeta, null, null );

  String[] parameters = new String[beamConsumeMeta.getConfigOptions().size()];
  String[] values = new String[beamConsumeMeta.getConfigOptions().size()];
  String[] types = new String[beamConsumeMeta.getConfigOptions().size()];
  for ( int i = 0; i < parameters.length; i++ ) {
    ConfigOption option = beamConsumeMeta.getConfigOptions().get( i );
    parameters[i] = transMeta.environmentSubstitute( option.getParameter() );
    values[i] = transMeta.environmentSubstitute( option.getValue() );
    types[i] = option.getType() == null ? ConfigOption.Type.String.name() : option.getType().name();
  }

  BeamKafkaInputTransform beamInputTransform = new BeamKafkaInputTransform(
    stepMeta.getName(),
    stepMeta.getName(),
    transMeta.environmentSubstitute( beamConsumeMeta.getBootstrapServers() ),
    transMeta.environmentSubstitute( beamConsumeMeta.getTopics() ),
    transMeta.environmentSubstitute( beamConsumeMeta.getGroupId() ),
    beamConsumeMeta.isUsingProcessingTime(),
    beamConsumeMeta.isUsingLogAppendTime(),
    beamConsumeMeta.isUsingCreateTime(),
    beamConsumeMeta.isRestrictedToCommitted(),
    beamConsumeMeta.isAllowingCommitOnConsumedOffset(),
    parameters,
    values,
    types,
    JsonRowMeta.toJson( outputRowMeta ),
    stepPluginClasses,
    xpPluginClasses
  );
  PCollection<KettleRow> afterInput = pipeline.apply( beamInputTransform );
  stepCollectionMap.put( stepMeta.getName(), afterInput );
  log.logBasic( "Handled step (KAFKA INPUT) : " + stepMeta.getName() );
}