Java Code Examples for org.pentaho.di.job.JobMeta#getLogTables()

The following examples show how to use org.pentaho.di.job.JobMeta#getLogTables(). All of them are taken from the pentaho-kettle project; the source file for each example is listed in its heading.
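As a quick orientation before the project code: getLogTables() returns the complete list of LogTableInterface instances configured on a job, such as the job log, job entry log and channel log tables. The sketch below simply prints the tables that are actually configured; it assumes a JobMeta has already been loaded elsewhere, and the class and method names are illustrative only.

import java.util.List;

import org.pentaho.di.core.logging.LogTableInterface;
import org.pentaho.di.job.JobMeta;

public class LogTableLister {

  // Prints every log table of the job that has both a connection and a table name set.
  public static void printConfiguredLogTables( JobMeta jobMeta ) {
    List<LogTableInterface> logTables = jobMeta.getLogTables();
    for ( LogTableInterface logTable : logTables ) {
      if ( logTable.getDatabaseMeta() != null && logTable.getTableName() != null ) {
        System.out.println( logTable.getTableName() + " @ " + logTable.getDatabaseMeta().getName() );
      }
    }
  }
}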
Example 1
Source File: JobDialog.java    From pentaho-kettle with Apache License 2.0
public JobDialog( Shell parent, int style, JobMeta jobMeta, Repository rep ) {
  super( parent, style );
  this.jobMeta = jobMeta;
  this.props = PropsUI.getInstance();
  this.rep = rep;

  this.newDirectory = null;

  directoryChangeAllowed = true;

  // Work on local copies of the job's log tables so that edits made in the
  // dialog can be discarded if the user cancels.
  logTables = new ArrayList<LogTableInterface>();
  logTableUserInterfaces = new ArrayList<LogTableUserInterface>();
  for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
    logTables.add( (LogTableInterface) logTable.clone() );
  }
}
 
Example 2
Source File: KettleFileTableModel.java    From pentaho-kettle with Apache License 2.0
public static String getLogging( ReportSubjectLocation filename ) throws KettleException {
  List<LogTableInterface> logTables;
  if ( filename.isTransformation() ) {
    TransMeta transMeta = TransformationInformation.getInstance().getTransMeta( filename );
    logTables = transMeta.getLogTables();
  } else {
    JobMeta jobMeta = JobInformation.getInstance().getJobMeta( filename );
    logTables = jobMeta.getLogTables();
  }
  String logging = "";
  // Build a comma-separated "table@connection" summary of every configured log table.
  for ( Iterator<LogTableInterface> iterator = logTables.iterator(); iterator.hasNext(); ) {
    LogTableInterface logTableInterface = iterator.next();
    if ( logTableInterface.getDatabaseMeta() != null && !Utils.isEmpty( logTableInterface.getTableName() ) ) {
      if ( logging.length() > 0 ) {
        logging += ", ";
      }
      logging += logTableInterface.getTableName() + "@" + logTableInterface.getDatabaseMeta().getName();
    }
  }
  return logging;
}
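Example 2 builds its summary with repeated String concatenation; a StringBuilder does the same job without creating intermediate strings. A minimal sketch under the same assumptions (the class and method names are hypothetical):

import java.util.List;

import org.pentaho.di.core.logging.LogTableInterface;

public class LoggingSummary {

  // Builds the same "table@connection" listing as Example 2, using a StringBuilder.
  public static String summarize( List<LogTableInterface> logTables ) {
    StringBuilder logging = new StringBuilder();
    for ( LogTableInterface logTable : logTables ) {
      if ( logTable.getDatabaseMeta() != null
          && logTable.getTableName() != null && logTable.getTableName().length() > 0 ) {
        if ( logging.length() > 0 ) {
          logging.append( ", " );
        }
        logging.append( logTable.getTableName() ).append( "@" ).append( logTable.getDatabaseMeta().getName() );
      }
    }
    return logging.toString();
  }
}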
 
Example 3
Source File: JobDelegate.java    From pentaho-kettle with Apache License 2.0
protected void loadJobMetaDetails( DataNode rootNode, JobMeta jobMeta ) throws KettleException {
  try {
    jobMeta.setExtendedDescription( getString( rootNode, PROP_EXTENDED_DESCRIPTION ) );
    jobMeta.setJobversion( getString( rootNode, PROP_JOB_VERSION ) );
    jobMeta.setJobstatus( (int) rootNode.getProperty( PROP_JOB_STATUS ).getLong() );
    jobMeta.getJobLogTable().setTableName( getString( rootNode, PROP_TABLE_NAME_LOG ) );

    jobMeta.setCreatedUser( getString( rootNode, PROP_CREATED_USER ) );
    jobMeta.setCreatedDate( getDate( rootNode, PROP_CREATED_DATE ) );

    jobMeta.setModifiedUser( getString( rootNode, PROP_MODIFIED_USER ) );
    jobMeta.setModifiedDate( getDate( rootNode, PROP_MODIFIED_DATE ) );

    if ( rootNode.hasProperty( PROP_DATABASE_LOG ) ) {
      String id = rootNode.getProperty( PROP_DATABASE_LOG ).getRef().getId().toString();
      DatabaseMeta conn = ( DatabaseMeta.findDatabase( jobMeta.getDatabases(), new StringObjectId( id ) ) );
      jobMeta.getJobLogTable().setConnectionName( conn.getName() );
    }

    jobMeta.getJobLogTable().setBatchIdUsed( rootNode.getProperty( PROP_USE_BATCH_ID ).getBoolean() );
    jobMeta.setBatchIdPassed( rootNode.getProperty( PROP_PASS_BATCH_ID ).getBoolean() );
    jobMeta.getJobLogTable().setLogFieldUsed( rootNode.getProperty( PROP_USE_LOGFIELD ).getBoolean() );

    jobMeta.getJobLogTable().setLogSizeLimit( getString( rootNode, PROP_LOG_SIZE_LIMIT ) );

    // Load the logging tables too..
    //
    RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute( rootNode, jobMeta.getDatabases() );
    for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
      logTable.loadFromRepository( attributeInterface );
    }

    // Load the attributes map
    //
    AttributesMapUtil.loadAttributesMap( rootNode, jobMeta );

  } catch ( Exception e ) {
    throw new KettleException( "Error loading job details", e );
  }
}
 
Example 4
Source File: JobDelegate.java    From pentaho-kettle with Apache License 2.0
private void saveJobDetails( DataNode rootNode, JobMeta jobMeta ) throws KettleException {
  rootNode.setProperty( PROP_EXTENDED_DESCRIPTION, jobMeta.getExtendedDescription() );
  rootNode.setProperty( PROP_JOB_VERSION, jobMeta.getJobversion() );
  rootNode.setProperty( PROP_JOB_STATUS, jobMeta.getJobstatus() < 0 ? -1L : jobMeta.getJobstatus() );

  if ( jobMeta.getJobLogTable().getDatabaseMeta() != null ) {
    DataNodeRef ref = new DataNodeRef( jobMeta.getJobLogTable().getDatabaseMeta().getObjectId().getId() );
    rootNode.setProperty( PROP_DATABASE_LOG, ref );
  }
  rootNode.setProperty( PROP_TABLE_NAME_LOG, jobMeta.getJobLogTable().getTableName() );

  rootNode.setProperty( PROP_CREATED_USER, jobMeta.getCreatedUser() );
  rootNode.setProperty( PROP_CREATED_DATE, jobMeta.getCreatedDate() );
  rootNode.setProperty( PROP_MODIFIED_USER, jobMeta.getModifiedUser() );
  rootNode.setProperty( PROP_MODIFIED_DATE, jobMeta.getModifiedDate() );
  rootNode.setProperty( PROP_USE_BATCH_ID, jobMeta.getJobLogTable().isBatchIdUsed() );
  rootNode.setProperty( PROP_PASS_BATCH_ID, jobMeta.isBatchIdPassed() );
  rootNode.setProperty( PROP_USE_LOGFIELD, jobMeta.getJobLogTable().isLogFieldUsed() );
  rootNode.setProperty( PROP_SHARED_FILE, jobMeta.getSharedObjectsFile() );

  rootNode.setProperty( PROP_LOG_SIZE_LIMIT, jobMeta.getJobLogTable().getLogSizeLimit() );

  // Save the logging tables too..
  //
  RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute( rootNode, jobMeta.getDatabases() );
  for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
    logTable.saveToRepository( attributeInterface );
  }

  // Save the attributes map
  //
  AttributesMapUtil.saveAttributesMap( rootNode, jobMeta );
}
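Examples 3 and 4 are mirror images of each other: every LogTableInterface knows how to persist and restore its own settings through a RepositoryAttributeInterface, so the delegate only needs the same generic loop in both directions. A condensed sketch of that shared pattern (the helper method names are hypothetical; the calls are the ones used in the two examples above):

private void saveLogTables( DataNode rootNode, JobMeta jobMeta ) throws KettleException {
  // Each log table serializes its own attributes; no table-specific code is needed here.
  RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute( rootNode, jobMeta.getDatabases() );
  for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
    logTable.saveToRepository( attributeInterface );
  }
}

private void loadLogTables( DataNode rootNode, JobMeta jobMeta ) throws KettleException {
  // The inverse of saveLogTables(): every log table reads its attributes back from the node.
  RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute( rootNode, jobMeta.getDatabases() );
  for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
    logTable.loadFromRepository( attributeInterface );
  }
}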
 
Example 5
Source File: KettleDatabaseRepositoryJobDelegate.java    From pentaho-kettle with Apache License 2.0
private synchronized void insertJob( JobMeta jobMeta ) throws KettleException {
  RowMetaAndData table = new RowMetaAndData();

  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_JOB ),
    jobMeta.getObjectId() );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY ),
    jobMeta.getRepositoryDirectory().getObjectId() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME ),
    jobMeta.getName() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION ),
    jobMeta.getDescription() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION ),
    jobMeta.getExtendedDescription() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION ),
    jobMeta.getJobversion() );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS ),
    new Long( jobMeta.getJobstatus() < 0 ? -1L : jobMeta.getJobstatus() ) );

  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG ),
    jobMeta.getJobLogTable().getDatabaseMeta() != null
      ? jobMeta.getJobLogTable().getDatabaseMeta().getObjectId() : -1L );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG ),
    jobMeta.getJobLogTable().getTableName() );
  table.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID ),
    jobMeta.getJobLogTable().isBatchIdUsed() );
  table.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD ),
    jobMeta.getJobLogTable().isLogFieldUsed() );
  repository.connectionDelegate.insertJobAttribute( jobMeta.getObjectId(), 0,
    KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT, 0, jobMeta.getJobLogTable().getLogSizeLimit() );

  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER ),
    jobMeta.getCreatedUser() );
  table.addValue( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE ),
    jobMeta.getCreatedDate() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER ),
    jobMeta.getModifiedUser() );
  table.addValue( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE ),
    jobMeta.getModifiedDate() );
  table.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID ),
    jobMeta.isBatchIdPassed() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE ),
    jobMeta.getSharedObjectsFile() );

  repository.connectionDelegate.getDatabase().prepareInsert(
    table.getRowMeta(), KettleDatabaseRepository.TABLE_R_JOB );
  repository.connectionDelegate.getDatabase().setValuesInsert( table );
  repository.connectionDelegate.getDatabase().insertRow();
  if ( log.isDebug() ) {
    log.logDebug( "Inserted new record into table "
      + quoteTable( KettleDatabaseRepository.TABLE_R_JOB ) + " with data : " + table );
  }
  repository.connectionDelegate.getDatabase().closeInsert();

  // Save the logging connection link...
  if ( jobMeta.getJobLogTable().getDatabaseMeta() != null ) {
    repository.insertJobEntryDatabase( jobMeta.getObjectId(), null, jobMeta
      .getJobLogTable().getDatabaseMeta().getObjectId() );
  }

  // Save the logging tables too..
  //
  RepositoryAttributeInterface attributeInterface =
    new KettleDatabaseRepositoryJobAttribute( repository.connectionDelegate, jobMeta.getObjectId() );
  for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
    logTable.saveToRepository( attributeInterface );
  }
}