Java Code Examples for org.pentaho.di.core.logging.LogChannelInterface#logDetailed()

The following examples show how to use org.pentaho.di.core.logging.LogChannelInterface#logDetailed(). Each example notes the original project and source file above the code.
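Before the examples, here is a minimal orientation sketch. The reportLoad helper and its arguments are hypothetical, but isDetailed(), both logDetailed() overloads, and the channel factory referenced in the comments all appear in the examples below. The usual pattern is to guard logDetailed() with isDetailed() so the message string is only built when the DETAILED level is actually visible; a varargs overload with MessageFormat-style placeholders is also available.

import org.pentaho.di.core.logging.LogChannelInterface;

public class LogDetailedSketch {

  // Hypothetical helper showing the guard-then-log pattern. The channel would come
  // from the surrounding step, job entry, or connection; Example 9 creates one with
  // KettleLogStore.getLogChannelInterfaceFactory().create( this ).
  static void reportLoad( LogChannelInterface log, String name, String path ) {
    // Build the concatenated message only when DETAILED logging is enabled,
    // as Examples 1-3 and 9 do.
    if ( log.isDetailed() ) {
      log.logDetailed( "Loading element '" + name + "' from '" + path + "'" );
    }

    // The varargs overload takes MessageFormat-style placeholders instead;
    // Examples 7 and 8 use this form.
    log.logDetailed( "Loading element [{0}] from [{1}]", name, path );
  }
}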
Example 1
Source File: TransMetaModifier.java    From pentaho-pdi-dataset with Apache License 2.0
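/**
 * Replaces a step with an Injector step that will feed in the rows of a unit-test input
 * data set, logging both the replacement and the injector fields at the detailed level.
 */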
private void handleInputDataSet( LogChannelInterface log, TransUnitTestSetLocation inputLocation, TransUnitTest unitTest, TransMeta transMeta, StepMeta stepMeta,
                                 FactoriesHierarchy factoriesHierarchy ) throws KettleException {

  String inputSetName = inputLocation.getDataSetName();

  if ( log.isDetailed() ) {
    log.logDetailed( "Replacing step '" + stepMeta.getName() + "' with an Injector for dataset '" + inputSetName + "'" );
  }

  DataSet dataSet;
  try {
    dataSet = factoriesHierarchy.getSetFactory().loadElement( inputSetName );
  } catch ( MetaStoreException e ) {
    throw new KettleException( "Unable to load data set '" + inputSetName + "'", e );
  }

  // OK, this step needs to be replaced by an Injector step...
  // Which fields do we need to use?
  //
  final RowMetaInterface stepFields = DataSetConst.getStepOutputFields( log, dataSet, inputLocation );

  if ( log.isDetailed() ) {
    log.logDetailed( "Input Data Set '" + inputSetName + "' Injector fields : '" + stepFields.toString() );
  }

  InjectorMeta injectorMeta = new InjectorMeta();
  injectorMeta.allocate( stepFields.size() );
  for ( int x = 0; x < stepFields.size(); x++ ) {
    injectorMeta.getFieldname()[ x ] = stepFields.getValueMeta( x ).getName();
    injectorMeta.getType()[ x ] = stepFields.getValueMeta( x ).getType();
    injectorMeta.getLength()[ x ] = stepFields.getValueMeta( x ).getLength();
    injectorMeta.getPrecision()[ x ] = stepFields.getValueMeta( x ).getPrecision();
  }

  // Only the step metadata and plugin ID change; the step keeps its name and position.
  stepMeta.setStepMetaInterface( injectorMeta );
  stepMeta.setStepID( PluginRegistry.getInstance().getPluginId( StepPluginType.class, injectorMeta ) );
}
 
Example 2
Source File: TransMetaModifier.java    From pentaho-pdi-dataset with Apache License 2.0
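/**
 * Replaces the step a golden data set is verified against with a Dummy step,
 * logging the replacement at the detailed level.
 */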
private void handleGoldenDataSet( LogChannelInterface log, TransUnitTestSetLocation goldenLocation, StepMeta stepMeta ) {

  if ( log.isDetailed() ) {
    log.logDetailed( "Replacing step '" + stepMeta.getName() + "' with a Dummy for golden dataset '" + goldenLocation.getDataSetName() + "'" );
  }

  replaceStepWithDummy( log, stepMeta );
}
 
Example 3
Source File: TransMetaModifier.java    From pentaho-pdi-dataset with Apache License 2.0
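/**
 * Replaces a step with a Dummy step for the "Bypass" unit-test tweak.
 */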
private void handleTweakBypassStep( LogChannelInterface log, StepMeta stepMeta ) {
  if ( log.isDetailed() ) {
    log.logDetailed( "Replacing step '" + stepMeta.getName() + "' with an Dummy for Bypass step tweak" );
  }

  replaceStepWithDummy( log, stepMeta );
}
 
Example 4
Source File: TransWebSocketEngineAdapter.java    From pentaho-kettle with Apache License 2.0
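/**
 * Forwards a remote log entry to the channel method matching its level;
 * DETAILED entries go to logDetailed().
 */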
private void logToChannel( LogChannelInterface logChannel, LogEntry data ) {
  LogLevel logLogLevel = data.getLogLogLevel();
  switch ( logLogLevel ) {
    case ERROR:
      if ( data.getThrowable() != null ) {
        logChannel.logError( data.getMessage(), data.getThrowable() );
      } else {
        logChannel.logError( data.getMessage() );
      }
      break;
    case MINIMAL:
      logChannel.logMinimal( data.getMessage() );
      break;
    case BASIC:
      logChannel.logBasic( data.getMessage() );
      break;
    case DETAILED:
      logChannel.logDetailed( data.getMessage() );
      break;
    case DEBUG:
      logChannel.logDebug( data.getMessage() );
      break;
    case TRACE:
      logChannel.logRowlevel( data.getMessage() );
      break;
  }
}
 
Example 5
Source File: InjectDataSetIntoTransExtensionPoint.java    From pentaho-pdi-dataset with Apache License 2.0
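/**
 * Injects the rows of a data set into a running transformation through a RowProducer,
 * logging the injected field names at the detailed level.
 */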
private void injectDataSetIntoStep( final Trans trans, final String dataSetName,
                                    final MetaStoreFactory<DataSet> dataSetFactory, final StepMeta stepMeta,
                                    TransUnitTestSetLocation inputLocation ) throws MetaStoreException, KettleException {

  final DataSet dataSet = dataSetFactory.loadElement( dataSetName );
  final LogChannelInterface log = trans.getLogChannel();

  final RowProducer rowProducer = trans.addRowProducer( stepMeta.getName(), 0 );

  // Look for the step into which we'll inject rows...
  //
  StepMetaDataCombi combi = null;
  for ( StepMetaDataCombi step : trans.getSteps() ) {
    if ( step.stepname.equals( stepMeta.getName() ) ) {
      combi = step;
      break;
    }
  }

  if ( combi != null ) {

    // Get the rows of the mapped values in the mapped order sorted as asked
    //
    final List<Object[]> dataSetRows = dataSet.getAllRows( log, inputLocation );
    RowMetaInterface dataSetRowMeta = dataSet.getMappedDataSetFieldsRowMeta( inputLocation );

    // The rows to inject are always driven by the dataset, NOT the step it replaces (!) for simplicity
    //
    RowMetaInterface injectRowMeta = new RowMeta();

    // Figure out which fields to pass
    // Only inject those mentioned in the field mappings...
    //
    int[] fieldIndexes = new int[ inputLocation.getFieldMappings().size() ];
    for ( int i = 0; i < inputLocation.getFieldMappings().size(); i++ ) {
      TransUnitTestFieldMapping fieldMapping = inputLocation.getFieldMappings().get( i );
      fieldIndexes[ i ] = dataSetRowMeta.indexOfValue( fieldMapping.getDataSetFieldName() );
      if ( fieldIndexes[ i ] < 0 ) {
        throw new KettleException( "Unable to find mapped field '" + fieldMapping.getDataSetFieldName() + "' in data set '" + dataSet.getName() + "'" );
      }
      ValueMetaInterface injectValueMeta = dataSetRowMeta.getValueMeta( fieldIndexes[ i ] ).clone();
      // Rename to the step output names though...
      //
      injectValueMeta.setName( fieldMapping.getStepFieldName() );
      injectRowMeta.addValueMeta( injectValueMeta );
    }

    log.logDetailed( "Injecting data set '" + dataSetName + "' into step '" + stepMeta.getName() + "', fields: " + Arrays.toString( injectRowMeta.getFieldNames() ) );

    // Pass rows
    //
    Runnable runnable = new Runnable() {
      @Override
      public void run() {
        try {

          for ( Object[] dataSetRow : dataSetRows ) {
            // pass the row with the external names, in the right order and with the selected columns from the data set
            //
            Object[] row = RowDataUtil.allocateRowData( injectRowMeta.size() );
            for ( int i = 0; i < fieldIndexes.length; i++ ) {
              row[ i ] = dataSetRow[ fieldIndexes[ i ] ];
            }
            rowProducer.putRow( injectRowMeta, row );
          }
          rowProducer.finished();

        } catch ( Exception e ) {
          throw new RuntimeException( "Problem injecting data set '" + dataSetName + "' row into step '" + stepMeta.getName() + "'", e );
        }
      }
    };
    Thread thread = new Thread( runnable );
    thread.start();
  }
}
 
Example 6
Source File: JobEntryWriteToLog.java    From pentaho-kettle with Apache License 2.0
/**
 * Output message to job log.
 */
public boolean evaluate( Result result ) {
  LogChannelInterface logChannel = createLogChannel();
  String message = getRealLogMessage();

  // Filter out empty messages and those that are not visible with the job's log level
  if ( Utils.isEmpty( message ) || !getEntryLogLevel().isVisible( logChannel.getLogLevel() ) ) {
    return true;
  }

  try {
    switch ( getEntryLogLevel() ) {
      case ERROR:
        logChannel.logError( message + Const.CR );
        break;
      case MINIMAL:
        logChannel.logMinimal( message + Const.CR );
        break;
      case BASIC:
        logChannel.logBasic( message + Const.CR );
        break;
      case DETAILED:
        logChannel.logDetailed( message + Const.CR );
        break;
      case DEBUG:
        logChannel.logDebug( message + Const.CR );
        break;
      case ROWLEVEL:
        logChannel.logRowlevel( message + Const.CR );
        break;
      default: // NOTHING
        break;
    }

    return true;
  } catch ( Exception e ) {
    result.setNrErrors( 1 );
    log.logError( BaseMessages.getString( PKG, "WriteToLog.Error.Label" ), BaseMessages.getString(
      PKG, "WriteToLog.Error.Description" )
      + " : " + e.toString() );
    return false;
  }

}
 
Example 7
Source File: PurRepository.java    From pentaho-kettle with Apache License 2.0
/**
 * Load all transformations referenced by {@code files}.
 *
 * @param monitor
 * @param log
 * @param files
 *          Transformation files to load.
 * @param setInternalVariables
 *          Should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
 * @return Loaded transformations
 * @throws KettleException
 *           Error loading data for transformations from repository
 */
protected List<TransMeta> loadTransformations( final ProgressMonitorListener monitor, final LogChannelInterface log,
                                               final List<RepositoryFile> files, final boolean setInternalVariables )
  throws KettleException {
  List<TransMeta> transformations = new ArrayList<TransMeta>( files.size() );

  readWriteLock.readLock().lock();
  List<NodeRepositoryFileData> filesData;
  List<VersionSummary> versions;
  try {
    filesData = pur.getDataForReadInBatch( files, NodeRepositoryFileData.class );
    versions = pur.getVersionSummaryInBatch( files );
  } finally {
    readWriteLock.readLock().unlock();
  }

  Iterator<RepositoryFile> filesIter = files.iterator();
  Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
  Iterator<VersionSummary> versionsIter = versions.iterator();
  while ( ( monitor == null || !monitor.isCanceled() ) && filesIter.hasNext() ) {
    RepositoryFile file = filesIter.next();
    NodeRepositoryFileData fileData = filesDataIter.next();
    VersionSummary version = versionsIter.next();
    String dirPath = file.getPath().substring( 0, file.getPath().lastIndexOf( RepositoryDirectory.DIRECTORY_SEPARATOR ) );
    try {
      log.logDetailed( "Loading/Exporting transformation [{0} : {1}]  ({2})", dirPath, file.getTitle(), file
        .getPath() ); //$NON-NLS-1$
      if ( monitor != null ) {
        monitor.subTask( "Exporting transformation [" + file.getPath() + "]" ); //$NON-NLS-1$ //$NON-NLS-2$
      }
      TransMeta transMeta = buildTransMeta( file, findDirectory( dirPath ), fileData, createObjectRevision( version ) );
      ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationMetaLoaded.id, transMeta );
      transformations.add( transMeta );
    } catch ( Exception ex ) {
      log.logDetailed( "Unable to load transformation [" + file.getPath() + "]", ex ); //$NON-NLS-1$ //$NON-NLS-2$
      log.logError( "An error occurred reading transformation [" + file.getTitle() + "] from directory [" + dirPath
        + "] : " + ex.getMessage() ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
      log.logError( "Transformation [" + file.getTitle() + "] from directory [" + dirPath
        + "] was not exported because of a loading error!" ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    }
  }
  return transformations;
}
 
Example 8
Source File: PurRepository.java    From pentaho-kettle with Apache License 2.0
/**
 * Load all jobs referenced by {@code files}.
 *
 * @param monitor
 * @param log
 * @param files
 *          Job files to load.
 * @param setInternalVariables
 *          Should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
 * @return Loaded jobs
 * @throws KettleException
 *           Error loading data for jobs from repository
 */
protected List<JobMeta> loadJobs( final ProgressMonitorListener monitor, final LogChannelInterface log,
                                  final List<RepositoryFile> files, final boolean setInternalVariables )
  throws KettleException {
  List<JobMeta> jobs = new ArrayList<JobMeta>( files.size() );

  readWriteLock.readLock().lock();
  List<NodeRepositoryFileData> filesData;
  List<VersionSummary> versions;
  try {
    filesData = pur.getDataForReadInBatch( files, NodeRepositoryFileData.class );
    versions = pur.getVersionSummaryInBatch( files );
  } finally {
    readWriteLock.readLock().unlock();
  }

  Iterator<RepositoryFile> filesIter = files.iterator();
  Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
  Iterator<VersionSummary> versionsIter = versions.iterator();
  while ( ( monitor == null || !monitor.isCanceled() ) && filesIter.hasNext() ) {
    RepositoryFile file = filesIter.next();
    NodeRepositoryFileData fileData = filesDataIter.next();
    VersionSummary version = versionsIter.next();
    try {
      String dirPath = file.getPath().substring( 0, file.getPath().lastIndexOf( RepositoryDirectory.DIRECTORY_SEPARATOR ) );
      log.logDetailed( "Loading/Exporting job [{0} : {1}]  ({2})", dirPath, file.getTitle(),
        file.getPath() ); //$NON-NLS-1$
      if ( monitor != null ) {
        monitor.subTask( "Exporting job [" + file.getPath() + "]" ); //$NON-NLS-1$ //$NON-NLS-2$
      }
      JobMeta jobMeta = buildJobMeta( file, findDirectory( dirPath ), fileData, createObjectRevision( version ) );
      ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.JobMetaLoaded.id, jobMeta );
      jobs.add( jobMeta );
    } catch ( Exception ex ) {
      log.logError( "Unable to load job [" + file.getPath() + "]", ex ); //$NON-NLS-1$ //$NON-NLS-2$
    }
  }
  return jobs;
}
 
Example 9
Source File: SalesforceConnection.java    From pentaho-kettle with Apache License 2.0
/**
 * Construct a new Salesforce Connection
 */
public SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ) throws KettleException {
  if ( logInterface == null ) {
    this.log = KettleLogStore.getLogChannelInterfaceFactory().create( this );
  } else {
    this.log = logInterface;
  }
  this.url = url;
  setUsername( username );
  setPassword( password );
  setTimeOut( 0 );

  this.binding = null;
  this.loginResult = null;
  this.userInfo = null;
  this.sql = null;
  this.serverTimestamp = null;
  this.qr = null;
  this.startDate = null;
  this.endDate = null;
  this.sObjects = null;
  this.recordsFilter = SalesforceConnectionUtils.RECORDS_FILTER_ALL;
  this.fieldsList = null;
  this.queryResultSize = 0;
  this.recordsCount = 0;
  setUsingCompression( false );
  setRollbackAllChangesOnError( false );

  // check target URL
  if ( Utils.isEmpty( getURL() ) ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.TargetURLMissing.Error" ) );
  }

  // check username
  if ( Utils.isEmpty( getUsername() ) ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.UsernameMissing.Error" ) );
  }

  if ( log.isDetailed() ) {
    log.logDetailed( BaseMessages.getString( PKG, "SalesforceInput.Log.NewConnection" ) );
  }
}