Java Code Examples for org.pentaho.di.core.logging.LogChannelInterface#logBasic()

The following examples show how to use org.pentaho.di.core.logging.LogChannelInterface#logBasic(). All of them are taken from open-source projects; the source file, project, and license are listed above each example.
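Before looking at the project code, here is a minimal, self-contained sketch of the pattern the examples share: obtain a channel, optionally guard with isBasic() so message strings are only built when basic logging is enabled, and pass the message to logBasic(). LogChannel.GENERAL and isBasic() both appear in the examples below; the class and messages in this sketch are illustrative only.

import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;

public class LogBasicSketch {

  public static void main( String[] args ) {
    // The shared general-purpose channel (used in Example 6 below).
    LogChannelInterface log = LogChannel.GENERAL;

    // Unconditional basic logging, the most common pattern in the examples.
    log.logBasic( "Processing started" );

    // Guarding with isBasic() skips message construction when the channel's
    // log level is below BASIC (see Example 1).
    if ( log.isBasic() ) {
      log.logBasic( "Finished processing batch" );
    }
  }
}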
Example 1
Source File: ConnectionPoolUtil.java    From pentaho-kettle with Apache License 2.0
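Creates a pooled BasicDataSource, validates it, registers it in the cache, and logs the start and completion of pool creation at the basic level, guarded by isBasic() so the messages are only built when they will actually be written.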
/**
 * Adds a new pooled data source to the cache.
 *
 * @param log          the log channel used to report progress
 * @param databaseMeta the metadata of the database to pool connections for
 * @param partitionId  the partition ID, used when naming the data source
 * @param initialSize  the initial size of the connection pool
 * @param maximumSize  the maximum size of the connection pool
 * @throws KettleDatabaseException if the data source cannot be configured or validated
 */
private static void addPoolableDataSource( LogChannelInterface log, DatabaseMeta databaseMeta, String partitionId,
    int initialSize, int maximumSize ) throws KettleDatabaseException {
  if ( log.isBasic() ) {
    log.logBasic( BaseMessages.getString( PKG, "Database.CreatingConnectionPool", databaseMeta.getName() ) );
  }

  BasicDataSource ds = new BasicDataSource();
  configureDataSource( ds, databaseMeta, partitionId, initialSize, maximumSize );
  // check if datasource is valid
  testDataSource( ds );
  // register data source
  dataSources.put( getDataSourceName( databaseMeta, partitionId ), ds );

  if ( log.isBasic() ) {
    log.logBasic( BaseMessages.getString( PKG, "Database.CreatedConnectionPool", databaseMeta.getName() ) );
  }
}
 
Example 2
Source File: MonetDBBulkLoader.java    From pentaho-kettle with Apache License 2.0
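Opens a MapiSocket connection to MonetDB and logs any connection warnings at the basic level; a null log channel is tolerated.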
protected static MapiSocket getMonetDBConnection( String host, int port,
    String user, String password, String db, LogChannelInterface log ) throws Exception {
  MapiSocket mserver = new MapiSocket();
  mserver.setDatabase( db );
  mserver.setLanguage( "sql" );

  // connect() returns a list of warnings, or null if the connection came up cleanly
  List<?> warnings = mserver.connect( host, port, user, password );
  if ( warnings != null ) {
    for ( Object warning : warnings ) {
      if ( log != null ) {
        log.logBasic( "MonetDB connection warning: " + warning );
      }
    }
  } else {
    if ( log != null ) {
      log.logDebug( "Successful MapiSocket connection to MonetDB established." );
    }
  }
  return mserver;
}
 
Example 3
Source File: SlaveServerConfig.java    From pentaho-kettle with Apache License 2.0
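Optionally replaces a slave server's host name with the IP address of a configured network interface and logs the change at the basic level.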
private void checkNetworkInterfaceSetting( LogChannelInterface log, Node slaveNode, SlaveServer slaveServer ) {
  // See if we need to grab the network interface to use and then override the host name
  //
  String networkInterfaceName = XMLHandler.getTagValue( slaveNode, "network_interface" );
  if ( !Utils.isEmpty( networkInterfaceName ) ) {
    // OK, so let's try to get the IP address for this network interface...
    //
    try {
      String newHostname = Const.getIPAddress( networkInterfaceName );
      if ( newHostname != null ) {
        slaveServer.setHostname( newHostname );
        // Also change the name of the slave...
        //
        slaveServer.setName( slaveServer.getName() + "-" + newHostname );
        log.logBasic( "Hostname for slave server ["
          + slaveServer.getName() + "] is set to [" + newHostname + "], information derived from network "
          + networkInterfaceName );
      }
    } catch ( SocketException e ) {
      log.logError( "Unable to get the IP address for network interface "
        + networkInterfaceName + " for slave server [" + slaveServer.getName() + "]", e );
    }
  }

}
 
Example 4
Source File: BeamBigQueryInputStepHandler.java    From kettle-beam with Apache License 2.0
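Builds a BeamBQInputTransform from the step's project, dataset, table, and query settings, applies it to the pipeline, and logs the handled step at the basic level.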
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  // Input handling
  //
  BeamBQInputMeta beamInputMeta = (BeamBQInputMeta) stepMeta.getStepMetaInterface();

  // Output rows (fields selection)
  //
  RowMetaInterface outputRowMeta = new RowMeta();
  beamInputMeta.getFields( outputRowMeta, stepMeta.getName(), null, null, transMeta, null, null );

  BeamBQInputTransform beamInputTransform = new BeamBQInputTransform(
    stepMeta.getName(),
    stepMeta.getName(),
    transMeta.environmentSubstitute( beamInputMeta.getProjectId() ),
    transMeta.environmentSubstitute( beamInputMeta.getDatasetId() ),
    transMeta.environmentSubstitute( beamInputMeta.getTableId() ),
    transMeta.environmentSubstitute( beamInputMeta.getQuery() ),
    JsonRowMeta.toJson( outputRowMeta ),
    stepPluginClasses,
    xpPluginClasses
  );
  PCollection<KettleRow> afterInput = pipeline.apply( beamInputTransform );
  stepCollectionMap.put( stepMeta.getName(), afterInput );
  log.logBasic( "Handled step (BQ INPUT) : " + stepMeta.getName() );

}
 
Example 5
Source File: TransWebSocketEngineAdapter.java    From pentaho-kettle with Apache License 2.0
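Forwards a LogEntry received over the WebSocket engine to the matching log method for its level; BASIC entries are routed to logBasic().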
private void logToChannel( LogChannelInterface logChannel, LogEntry data ) {
  LogLevel logLogLevel = data.getLogLogLevel();
  switch ( logLogLevel ) {
    case ERROR:
      if ( data.getThrowable() != null ) {
        logChannel.logError( data.getMessage(), data.getThrowable() );
      } else {
        logChannel.logError( data.getMessage() );
      }
      break;
    case MINIMAL:
      logChannel.logMinimal( data.getMessage() );
      break;
    case BASIC:
      logChannel.logBasic( data.getMessage() );
      break;
    case DETAILED:
      logChannel.logDetailed( data.getMessage() );
      break;
    case DEBUG:
      logChannel.logDebug( data.getMessage() );
      break;
    case TRACE:
      logChannel.logRowlevel( data.getMessage() );
      break;
  }
}
 
Example 6
Source File: TransMetaPipelineConverter.java    From kettle-beam with Apache License 2.0
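Creates a Beam pipeline for the configured runner, registers the KettleRow coder, and logs the created pipeline job's name on the general log channel.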
public Pipeline createPipeline( PipelineOptions pipelineOptions ) throws Exception {

  LogChannelInterface log = LogChannel.GENERAL;

  // Create a new Pipeline
  //
  RunnerType runnerType = RunnerType.getRunnerTypeByName( beamJobConfig.getRunnerTypeName() );
  Class<? extends PipelineRunner<?>> runnerClass = getPipelineRunnerClass( runnerType );

  pipelineOptions.setRunner( runnerClass );
  Pipeline pipeline = Pipeline.create( pipelineOptions );

  pipeline.getCoderRegistry().registerCoderForClass( KettleRow.class, new KettleRowCoder() );

  log.logBasic( "Created pipeline job with name '" + pipelineOptions.getJobName() + "'" );

  // Keep track of which step outputs which PCollection
  //
  Map<String, PCollection<KettleRow>> stepCollectionMap = new HashMap<>();

  // Handle input steps
  //
  handleBeamInputSteps( log, stepCollectionMap, pipeline );

  // Transform all the other steps...
  //
  handleGenericStep( stepCollectionMap, pipeline );

  // Output handling
  //
  handleBeamOutputSteps( log, stepCollectionMap, pipeline );

  return pipeline;
}
 
Example 7
Source File: BeamBigQueryOutputStepHandler.java    From kettle-beam with Apache License 2.0
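Builds a BeamBQOutputTransform for BigQuery output, rejects input from more than one previous step, and logs which step the output reads from.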
@Override public void handleStep( LogChannelInterface log, StepMeta beamOutputStepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input  ) throws KettleException {

  BeamBQOutputMeta beamOutputMeta = (BeamBQOutputMeta) beamOutputStepMeta.getStepMetaInterface();

  BeamBQOutputTransform beamOutputTransform = new BeamBQOutputTransform(
    beamOutputStepMeta.getName(),
    transMeta.environmentSubstitute( beamOutputMeta.getProjectId() ),
    transMeta.environmentSubstitute( beamOutputMeta.getDatasetId() ),
    transMeta.environmentSubstitute( beamOutputMeta.getTableId() ),
    beamOutputMeta.isCreatingIfNeeded(),
    beamOutputMeta.isTruncatingTable(),
    beamOutputMeta.isFailingIfNotEmpty(),
    JsonRowMeta.toJson( rowMeta ),
    stepPluginClasses,
    xpPluginClasses
  );

  // Which step do we apply this transform to?
  // Ignore info hops until we figure that out.
  //
  if ( previousSteps.size() > 1 ) {
    throw new KettleException( "Combining data from multiple steps is not supported yet!" );
  }
  StepMeta previousStep = previousSteps.get( 0 );

  // No need to store this, it's PDone.
  //
  input.apply( beamOutputTransform );
  log.logBasic( "Handled step (BQ OUTPUT) : " + beamOutputStepMeta.getName() + ", gets data from " + previousStep.getName() );
}
 
Example 8
Source File: BeamSubscriberStepHandler.java    From kettle-beam with Apache License 2.0
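Builds a BeamSubscribeTransform for Pub/Sub input, verifying that a topic is configured, and logs the handled step.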
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  // A Beam subscriber step
  //
  BeamSubscribeMeta inputMeta = (BeamSubscribeMeta) stepMeta.getStepMetaInterface();

  RowMetaInterface outputRowMeta = transMeta.getStepFields( stepMeta );
  String rowMetaJson = JsonRowMeta.toJson( outputRowMeta );

  // Verify some things:
  //
  if ( StringUtils.isEmpty( inputMeta.getTopic() ) ) {
    throw new KettleException( "Please specify a topic to read from in Beam Pub/Sub Subscribe step '" + stepMeta.getName() + "'" );
  }

  BeamSubscribeTransform subscribeTransform = new BeamSubscribeTransform(
    stepMeta.getName(),
    stepMeta.getName(),
    transMeta.environmentSubstitute( inputMeta.getSubscription() ),
    transMeta.environmentSubstitute( inputMeta.getTopic() ),
    inputMeta.getMessageType(),
    rowMetaJson,
    stepPluginClasses,
    xpPluginClasses
  );

  PCollection<KettleRow> afterInput = pipeline.apply( subscribeTransform );
  stepCollectionMap.put( stepMeta.getName(), afterInput );

  log.logBasic( "Handled step (SUBSCRIBE) : " + stepMeta.getName() );
}
 
Example 9
Source File: BeamGroupByStepHandler.java    From kettle-beam with Apache License 2.0
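Translates a Memory Group By step into a GroupByTransform, applies it to the input collection, and logs how many previous steps feed it.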
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  MemoryGroupByMeta groupByMeta = (MemoryGroupByMeta) stepMeta.getStepMetaInterface();

  String[] aggregates = new String[ groupByMeta.getAggregateType().length ];
  for ( int i = 0; i < aggregates.length; i++ ) {
    aggregates[ i ] = MemoryGroupByMeta.getTypeDesc( groupByMeta.getAggregateType()[ i ] );
  }

  PTransform<PCollection<KettleRow>, PCollection<KettleRow>> stepTransform = new GroupByTransform(
    stepMeta.getName(),
    JsonRowMeta.toJson( rowMeta ),  // The input row
    stepPluginClasses,
    xpPluginClasses,
    groupByMeta.getGroupField(),
    groupByMeta.getSubjectField(),
    aggregates,
    groupByMeta.getAggregateField()
  );

  // Apply the step transform to the previous step's PCollection(s)
  //
  PCollection<KettleRow> stepPCollection = input.apply( stepMeta.getName(), stepTransform );

  // Save this in the map
  //
  stepCollectionMap.put( stepMeta.getName(), stepPCollection );
  log.logBasic( "Handled Group By (STEP) : " + stepMeta.getName() + ", gets data from " + previousSteps.size() + " previous step(s)" );
}
 
Example 10
Source File: BeamInputStepHandler.java    From kettle-beam with Apache License 2.0
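Loads the file definition for a Beam input step from the metastore, applies the input transform to the pipeline, and logs the handled step.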
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  // Input handling
  //
  BeamInputMeta beamInputMeta = (BeamInputMeta) stepMeta.getStepMetaInterface();
  FileDefinition inputFileDefinition = beamInputMeta.loadFileDefinition( metaStore );
  if ( inputFileDefinition == null ) {
    throw new KettleException( "We couldn't find or load the Beam Input step file definition" );
  }
  RowMetaInterface fileRowMeta = inputFileDefinition.getRowMeta();

  // Apply the PBegin to KettleRow transform:
  //
  String fileInputLocation = transMeta.environmentSubstitute( beamInputMeta.getInputLocation() );

  BeamInputTransform beamInputTransform = new BeamInputTransform(
    stepMeta.getName(),
    stepMeta.getName(),
    fileInputLocation,
    transMeta.environmentSubstitute( inputFileDefinition.getSeparator() ),
    JsonRowMeta.toJson( fileRowMeta ),
    stepPluginClasses,
    xpPluginClasses
  );
  PCollection<KettleRow> afterInput = pipeline.apply( beamInputTransform );
  stepCollectionMap.put( stepMeta.getName(), afterInput );
  log.logBasic( "Handled step (INPUT) : " + stepMeta.getName() );

}
 
Example 11
Source File: BeamKafkaOutputStepHandler.java    From kettle-beam with Apache License 2.0
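Builds a BeamKafkaOutputTransform from the producer step's bootstrap servers, topic, key, and message fields, and logs which step it reads from.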
@Override public void handleStep( LogChannelInterface log, StepMeta beamOutputStepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  BeamProduceMeta beamProduceMeta = (BeamProduceMeta) beamOutputStepMeta.getStepMetaInterface();

  BeamKafkaOutputTransform beamOutputTransform = new BeamKafkaOutputTransform(
    beamOutputStepMeta.getName(),
    transMeta.environmentSubstitute( beamProduceMeta.getBootstrapServers() ),
    transMeta.environmentSubstitute( beamProduceMeta.getTopic() ),
    transMeta.environmentSubstitute( beamProduceMeta.getKeyField() ),
    transMeta.environmentSubstitute( beamProduceMeta.getMessageField() ),
    JsonRowMeta.toJson( rowMeta ),
    stepPluginClasses,
    xpPluginClasses
  );

  // Which step do we apply this transform to?
  // Ignore info hops until we figure that out.
  //
  if ( previousSteps.size() > 1 ) {
    throw new KettleException( "Combining data from multiple steps is not supported yet!" );
  }
  StepMeta previousStep = previousSteps.get( 0 );

  // No need to store this, it's PDone.
  //
  input.apply( beamOutputTransform );
  log.logBasic( "Handled step (KAFKA OUTPUT) : " + beamOutputStepMeta.getName() + ", gets data from " + previousStep.getName() );
}
 
Example 12
Source File: BeamPublisherStepHandler.java    From kettle-beam with Apache License 2.0
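Builds a BeamPublishTransform for Pub/Sub output, verifying that a topic is configured, and logs which step it reads from.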
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  BeamPublishMeta publishMeta = (BeamPublishMeta) stepMeta.getStepMetaInterface();

  // some validation
  //
  if ( StringUtils.isEmpty( publishMeta.getTopic() ) ) {
    throw new KettleException( "Please specify a topic to publish to in Beam Pub/Sub Publish step '" + stepMeta.getName() + "'" );
  }

  BeamPublishTransform beamOutputTransform = new BeamPublishTransform(
    stepMeta.getName(),
    transMeta.environmentSubstitute( publishMeta.getTopic() ),
    publishMeta.getMessageType(),
    publishMeta.getMessageField(),
    JsonRowMeta.toJson( rowMeta ),
    stepPluginClasses,
    xpPluginClasses
  );

  // Which step do we apply this transform to?
  // Ignore info hops until we figure that out.
  //
  if ( previousSteps.size() > 1 ) {
    throw new KettleException( "Combining data from multiple steps is not supported yet!" );
  }
  StepMeta previousStep = previousSteps.get( 0 );

  // No need to store this, it's PDone.
  //
  input.apply( beamOutputTransform );
  log.logBasic( "Handled step (PUBLISH) : " + stepMeta.getName() + ", gets data from " + previousStep.getName() );
}
 
Example 13
Source File: BeamTimestampStepHandler.java    From kettle-beam with Apache License 2.0
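Applies a TimestampFn to each row, after validating the configured timestamp field name, and logs the handled step.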
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  BeamTimestampMeta beamTimestampMeta = (BeamTimestampMeta) stepMeta.getStepMetaInterface();

  if ( !beamTimestampMeta.isReadingTimestamp() && StringUtils.isNotEmpty( beamTimestampMeta.getFieldName() ) ) {
    if ( rowMeta.searchValueMeta( beamTimestampMeta.getFieldName() ) == null ) {
      throw new KettleException( "Please specify a valid field name '" + stepMeta.getName() + "'" );
    }
  }

  PCollection<KettleRow> stepPCollection = input.apply( ParDo.of(
    new TimestampFn(
      stepMeta.getName(),
      JsonRowMeta.toJson( rowMeta ),
      transMeta.environmentSubstitute( beamTimestampMeta.getFieldName() ),
      beamTimestampMeta.isReadingTimestamp(),
      stepPluginClasses,
      xpPluginClasses
    ) ) );

  // Save this in the map
  //
  stepCollectionMap.put( stepMeta.getName(), stepPCollection );
  log.logBasic( "Handled step (TIMESTAMP) : " + stepMeta.getName() + ", gets data from " + previousSteps.size() + " previous step(s)" );
}
 
Example 14
Source File: BeamWindowStepHandler.java    From kettle-beam with Apache License 2.0
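Applies fixed, sliding, session, or global windowing to the input collection, optionally adds window-information fields, and logs the handled step.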
@Override public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface inputRowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  BeamWindowMeta beamWindowMeta = (BeamWindowMeta) stepMeta.getStepMetaInterface();

  if ( StringUtils.isEmpty( beamWindowMeta.getWindowType() ) ) {
    throw new KettleException( "Please specify a window type in Beam Window step '" + stepMeta.getName() + "'" );
  }

  String duration = transMeta.environmentSubstitute( beamWindowMeta.getDuration() );
  long durationSeconds = Const.toLong( duration, -1L );

  PCollection<KettleRow> stepPCollection;

  if ( BeamDefaults.WINDOW_TYPE_FIXED.equals( beamWindowMeta.getWindowType() ) ) {

    if ( durationSeconds <= 0 ) {
      throw new KettleException( "Please specify a valid positive window size (duration) for Beam window step '" + stepMeta.getName() + "'" );
    }

    FixedWindows fixedWindows = FixedWindows
      .of( Duration.standardSeconds( durationSeconds ) );
    stepPCollection = input.apply( Window.into( fixedWindows ) );

  } else if ( BeamDefaults.WINDOW_TYPE_SLIDING.equals( beamWindowMeta.getWindowType() ) ) {

    if ( durationSeconds <= 0 ) {
      throw new KettleException( "Please specify a valid positive window size (duration) for Beam window step '" + stepMeta.getName() + "'" );
    }

    String every = transMeta.environmentSubstitute( beamWindowMeta.getEvery() );
    long everySeconds = Const.toLong( every, -1L );

    SlidingWindows slidingWindows = SlidingWindows
      .of( Duration.standardSeconds( durationSeconds ) )
      .every( Duration.standardSeconds( everySeconds ) );
    stepPCollection = input.apply( Window.into( slidingWindows ) );

  } else if ( BeamDefaults.WINDOW_TYPE_SESSION.equals( beamWindowMeta.getWindowType() ) ) {

    if ( durationSeconds < 600 ) {
      throw new KettleException(
        "Please specify a window size (duration) of at least 600 (10 minutes) for Beam window step '" + stepMeta.getName() + "'.  This is the minimum gap between session windows." );
    }

    Sessions sessionWindows = Sessions
      .withGapDuration( Duration.standardSeconds( durationSeconds ) );
    stepPCollection = input.apply( Window.into( sessionWindows ) );

  } else if ( BeamDefaults.WINDOW_TYPE_GLOBAL.equals( beamWindowMeta.getWindowType() ) ) {

    stepPCollection = input.apply( Window.into( new GlobalWindows() ) );

  } else {
    throw new KettleException( "Beam Window type '" + beamWindowMeta.getWindowType() + " is not supported in step '" + stepMeta.getName() + "'" );
  }

  // Now get window information about the window if we asked about it...
  //
  if ( StringUtils.isNotEmpty( beamWindowMeta.getStartWindowField() ) ||
    StringUtils.isNotEmpty( beamWindowMeta.getEndWindowField() ) ||
    StringUtils.isNotEmpty( beamWindowMeta.getMaxWindowField() ) ) {

    WindowInfoFn windowInfoFn = new WindowInfoFn(
      stepMeta.getName(),
      transMeta.environmentSubstitute( beamWindowMeta.getMaxWindowField() ),
      transMeta.environmentSubstitute( beamWindowMeta.getStartWindowField() ),
      transMeta.environmentSubstitute( beamWindowMeta.getEndWindowField() ),
      JsonRowMeta.toJson( inputRowMeta ),
      stepPluginClasses,
      xpPluginClasses
    );

    stepPCollection = stepPCollection.apply( ParDo.of( windowInfoFn ) );
  }

  // Save this in the map
  //
  stepCollectionMap.put( stepMeta.getName(), stepPCollection );
  log.logBasic( "Handled step (WINDOW) : " + stepMeta.getName() + ", gets data from " + previousSteps.size() + " previous step(s)" );
}
 
Example 15
Source File: BeamOutputStepHandler.java    From kettle-beam with Apache License 2.0
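Resolves the output file definition (falling back to a generated default), builds a BeamOutputTransform, and logs which step the output reads from.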
@Override public void handleStep( LogChannelInterface log, StepMeta beamOutputStepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                                  Pipeline pipeline, RowMetaInterface rowMeta, List<StepMeta> previousSteps,
                                  PCollection<KettleRow> input ) throws KettleException {

  BeamOutputMeta beamOutputMeta = (BeamOutputMeta) beamOutputStepMeta.getStepMetaInterface();
  FileDefinition outputFileDefinition;
  if ( StringUtils.isEmpty( beamOutputMeta.getFileDescriptionName() ) ) {
    // Create a default file definition using standard output and sane defaults...
    //
    outputFileDefinition = getDefaultFileDefition( beamOutputStepMeta );
  } else {
    outputFileDefinition = beamOutputMeta.loadFileDefinition( metaStore );
  }

  if ( outputFileDefinition == null ) {
    throw new KettleException( "We couldn't find or load the Beam Output step file definition" );
  }

  // Empty file definition? Add all fields in the output
  //
  addAllFieldsToEmptyFileDefinition( rowMeta, outputFileDefinition );

  // Apply the output transform from KettleRow to PDone
  //
  if ( rowMeta == null || rowMeta.isEmpty() ) {
    throw new KettleException( "No output fields found in the file definition or from previous steps" );
  }

  BeamOutputTransform beamOutputTransform = new BeamOutputTransform(
    beamOutputStepMeta.getName(),
    transMeta.environmentSubstitute( beamOutputMeta.getOutputLocation() ),
    transMeta.environmentSubstitute( beamOutputMeta.getFilePrefix() ),
    transMeta.environmentSubstitute( beamOutputMeta.getFileSuffix() ),
    transMeta.environmentSubstitute( outputFileDefinition.getSeparator() ),
    transMeta.environmentSubstitute( outputFileDefinition.getEnclosure() ),
    beamOutputMeta.isWindowed(),
    JsonRowMeta.toJson( rowMeta ),
    stepPluginClasses,
    xpPluginClasses
  );

  // Which step do we apply this transform to?
  // Ignore info hops until we figure that out.
  //
  if ( previousSteps.size() > 1 ) {
    throw new KettleException( "Combining data from multiple steps is not supported yet!" );
  }
  StepMeta previousStep = previousSteps.get( 0 );

  // No need to store this, it's PDone.
  //
  input.apply( beamOutputTransform );
  log.logBasic( "Handled step (OUTPUT) : " + beamOutputStepMeta.getName() + ", gets data from " + previousStep.getName() );
}
 
Example 16
Source File: ValidateTransUnitTestExtensionPoint.java    From pentaho-pdi-dataset with Apache License 2.0
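Validates a transformation's execution results against a unit test after it runs, logging a pass or fail summary and one line per unit test result.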
@Override
public void callExtensionPoint( LogChannelInterface log, Object object ) throws KettleException {
  if ( !( object instanceof Trans ) ) {
    return;
  }

  final Trans trans = (Trans) object;
  final TransMeta transMeta = trans.getTransMeta();
  boolean runUnitTest = "Y".equalsIgnoreCase( transMeta.getVariable( DataSetConst.VAR_RUN_UNIT_TEST ) );
  if ( !runUnitTest ) {
    return;
  }

  // We should always have a unit test name here...
  String unitTestName = transMeta.getVariable( DataSetConst.VAR_UNIT_TEST_NAME );
  if ( StringUtil.isEmpty( unitTestName ) ) {
    return;
  }

  try {
    IMetaStore metaStore = transMeta.getMetaStore();
    Repository repository = transMeta.getRepository();

    if ( metaStore == null ) {
      return; // Nothing to do here, we can't reference data sets.
    }

    List<DatabaseMeta> databases = DataSetConst.getAvailableDatabases( repository, transMeta.getSharedObjects() );
    FactoriesHierarchy factoriesHierarchy = new FactoriesHierarchy( metaStore, databases );

    // If the transformation has a variable set with the unit test in it, we're dealing with a unit test situation.
    //
    TransUnitTest unitTest = factoriesHierarchy.getTestFactory().loadElement( unitTestName );

    final List<UnitTestResult> results = new ArrayList<UnitTestResult>();
    trans.getExtensionDataMap().put( DataSetConst.UNIT_TEST_RESULTS, results );

    // Validate execution results with what's in the data sets...
    //
    int errors = DataSetConst.validateTransResultAgainstUnitTest( trans, unitTest, factoriesHierarchy, results );
    if ( errors == 0 ) {
      log.logBasic( "Unit test '" + unitTest.getName() + "' passed succesfully" );
    } else {
      log.logBasic( "Unit test '" + unitTest.getName() + "' failed, " + errors + " errors detected, " + results.size() + " comments to report." );

      String dontShowResults = transMeta.getVariable( DataSetConst.VAR_DO_NOT_SHOW_UNIT_TEST_ERRORS, "N" );

      final Spoon spoon = Spoon.getInstance();
      if ( spoon != null && "N".equalsIgnoreCase( dontShowResults ) ) {
        spoon.getShell().getDisplay().asyncExec( new Runnable() {
          @Override
          public void run() {
            PreviewRowsDialog dialog = new PreviewRowsDialog( spoon.getShell(), trans, SWT.NONE,
              "Unit test results",
              UnitTestResult.getRowMeta(),
              UnitTestResult.getRowData( results ) );
            dialog.setDynamic( false );
            dialog.setProposingToGetMoreRows( false );
            dialog.setProposingToStop( false );
            dialog.setTitleMessage( "Unit test results", "Here are the results of the unit test validations:" );
            dialog.open();
          }
        } );
      }
    }
    log.logBasic( "----------------------------------------------" );
    for ( UnitTestResult result : results ) {
      if ( result.getDataSetName() != null ) {
        log.logBasic( result.getStepName() + " - " + result.getDataSetName() + " : " + result.getComment() );
      } else {
        log.logBasic( result.getComment() );
      }
    }
    log.logBasic( "----------------------------------------------" );
  } catch ( Throwable e ) {
    log.logError( "Unable to validate unit test/golden rows", e );
  }

}
 
Example 17
Source File: MongoDbOutputData.java    From pentaho-mongodb-plugin with Apache License 2.0
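Applies create and drop index operations to a MongoDB collection, logging each operation at the basic level.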
/**
 * Apply the supplied index operations to the collection. Indexes can be defined on one or more fields in the
 * document. Operation is either create or drop.
 *
 * @param indexes  a list of index operations
 * @param log      the logging object
 * @param truncate true if the collection was truncated in the current execution - in this case drop operations are
 *                 not necessary
 * @throws MongoException   if a MongoDB error occurs
 * @throws KettleException  if a Kettle-level error occurs
 * @throws MongoDbException if the MongoDB wrapper reports an error
 */
public void applyIndexes( List<MongoDbOutputMeta.MongoIndex> indexes, LogChannelInterface log, boolean truncate )
  throws MongoException, KettleException, MongoDbException {

  for ( MongoDbOutputMeta.MongoIndex index : indexes ) {
    String[] indexParts = index.m_pathToFields.split( "," ); //$NON-NLS-1$
    BasicDBObject mongoIndex = new BasicDBObject();
    for ( String indexKey : indexParts ) {
      String[] nameAndDirection = indexKey.split( ":" ); //$NON-NLS-1$
      int direction = 1;
      if ( nameAndDirection.length == 2 ) {
        direction = Integer.parseInt( nameAndDirection[ 1 ].trim() );
      }
      String name = nameAndDirection[ 0 ];

      // strip off brackets to get actual object name if terminal object
      // is an array
      if ( name.indexOf( '[' ) > 0 ) {
        name = name.substring( name.indexOf( '[' ) + 1, name.length() );
      }

      mongoIndex.put( name, direction );
    }

    if ( index.m_drop ) {
      if ( truncate ) {
        // The collection was truncated in this execution, so the drop is unnecessary
        log.logBasic(
          BaseMessages.getString( PKG, "MongoDbOutput.Messages.TruncateBeforeInsert", index ) ); //$NON-NLS-1$
      } else {
        m_collection.dropIndex( mongoIndex );
        log.logBasic( BaseMessages.getString( PKG, "MongoDbOutput.Messages.DropIndex", index ) ); //$NON-NLS-1$
      }
    } else {
      BasicDBObject options = new BasicDBObject();

      // create indexes in the background
      options.put( "background", true ); //$NON-NLS-1$
      options.put( "unique", index.m_unique ); //$NON-NLS-1$
      options.put( "sparse", index.m_sparse ); //$NON-NLS-1$
      m_collection.createIndex( mongoIndex, options );
      log.logBasic( BaseMessages.getString( PKG, "MongoDbOutput.Messages.CreateIndex", index ) ); //$NON-NLS-1$
    }
  }
}
 
Example 18
Source File: KettleFileTableModel.java    From pentaho-kettle with Apache License 2.0
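Reads the most recent entry from a transformation's or job's log table and summarizes it; failures to read the table are reported through logBasic().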
public static String getLastExecutionResult( LogChannelInterface log, LoggingObjectInterface parentObject,
  ReportSubjectLocation filename ) throws KettleException {

  LogTableInterface logTable = null;
  if ( filename.isTransformation() ) {
    TransMeta transMeta = TransformationInformation.getInstance().getTransMeta( filename );
    logTable = transMeta.getTransLogTable();
  } else {
    JobMeta jobMeta = JobInformation.getInstance().getJobMeta( filename );
    logTable = jobMeta.getJobLogTable();
  }
  if ( logTable != null && logTable.isDefined() ) {
    DatabaseMeta dbMeta = logTable.getDatabaseMeta();
    Database database = new Database( parentObject, dbMeta );
    try {
      database.connect();
      String sql = "SELECT ";
      sql += dbMeta.quoteField( logTable.getStatusField().getFieldName() ) + ", ";
      sql += dbMeta.quoteField( logTable.getLogDateField().getFieldName() ) + ", ";
      sql += dbMeta.quoteField( logTable.getErrorsField().getFieldName() );
      sql += " FROM ";
      sql += dbMeta.getQuotedSchemaTableCombination( logTable.getSchemaName(), logTable.getTableName() );
      sql += " ORDER BY " + dbMeta.quoteField( logTable.getLogDateField().getFieldName() ) + " DESC";

      RowMetaAndData oneRow = database.getOneRow( sql );
      String status = oneRow.getString( 0, "?" );
      Date date = oneRow.getDate( 1, null );
      Long nrErrors = oneRow.getInteger( 2 );

      String evaluation;
      if ( status.equalsIgnoreCase( LogStatus.END.getStatus() ) ) {
        evaluation = "Ended";
      } else if ( status.equalsIgnoreCase( LogStatus.START.getStatus() ) ) {
        evaluation = "Started";
      } else if ( status.equalsIgnoreCase( LogStatus.STOP.getStatus() ) ) {
        evaluation = "Stopped";
      } else if ( status.equalsIgnoreCase( LogStatus.RUNNING.getStatus() ) ) {
        evaluation = "Running";
      } else if ( status.equalsIgnoreCase( LogStatus.PAUSED.getStatus() ) ) {
        evaluation = "Paused";
      } else if ( status.equalsIgnoreCase( LogStatus.ERROR.getStatus() ) ) {
        evaluation = "Failed";
      } else {
        evaluation = "Unknown";
      }
      if ( nrErrors > 0 ) {
        evaluation += " with errors";
      } else {
        evaluation += " with success";
      }

      return evaluation + " at " + XMLHandler.date2string( date );

    } catch ( Exception e ) {
      log.logBasic( "Unable to get logging information from log table" + logTable );
    } finally {
      database.disconnect();
    }
  }
  return null;
}
 
Example 19
Source File: JobEntryWriteToLog.java    From pentaho-kettle with Apache License 2.0
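Writes a job entry's log message at its configured level; messages at the BASIC level go through logBasic().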
/**
 * Output message to job log.
 */
public boolean evaluate( Result result ) {
  LogChannelInterface logChannel = createLogChannel();
  String message = getRealLogMessage();

  // Filter out empty messages and those that are not visible with the job's log level
  if ( Utils.isEmpty( message ) || !getEntryLogLevel().isVisible( logChannel.getLogLevel() ) ) {
    return true;
  }

  try {
    switch ( getEntryLogLevel() ) {
      case ERROR:
        logChannel.logError( message + Const.CR );
        break;
      case MINIMAL:
        logChannel.logMinimal( message + Const.CR );
        break;
      case BASIC:
        logChannel.logBasic( message + Const.CR );
        break;
      case DETAILED:
        logChannel.logDetailed( message + Const.CR );
        break;
      case DEBUG:
        logChannel.logDebug( message + Const.CR );
        break;
      case ROWLEVEL:
        logChannel.logRowlevel( message + Const.CR );
        break;
      default: // NOTHING
        break;
    }

    return true;
  } catch ( Exception e ) {
    result.setNrErrors( 1 );
    log.logError( BaseMessages.getString( PKG, "WriteToLog.Error.Label" ), BaseMessages.getString(
      PKG, "WriteToLog.Error.Description" )
      + " : " + e.toString() );
    return false;
  }

}
 
Example 20
Source File: PDIFTPClient.java    From pentaho-kettle with Apache License 2.0
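An FTPClient subclass whose constructor records, via logBasic(), that the overridden client is in use.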
public PDIFTPClient( LogChannelInterface log ) {
  super();
  this.log = log;
  log.logBasic( BaseMessages.getString( PKG, "PDIFTPClient.DEBUG.Using.Overridden.FTPClient" ) );
}