org.pentaho.di.core.database.DatabaseMeta Java Examples

The following examples show how to use org.pentaho.di.core.database.DatabaseMeta. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source Project: kettle-beam   Author: mattcasters   File: BeamConsumeMeta.java    License: Apache License 2.0 6 votes vote down vote up
@Override public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  // Plain text settings.
  bootstrapServers = XMLHandler.getTagValue( stepnode, BOOTSTRAP_SERVERS );
  topics = XMLHandler.getTagValue( stepnode, TOPICS );
  keyField = XMLHandler.getTagValue( stepnode, KEY_FIELD );
  messageField = XMLHandler.getTagValue( stepnode, MESSAGE_FIELD );
  groupId = XMLHandler.getTagValue( stepnode, GROUP_ID );

  // Boolean flags are serialized as "Y"/"N" strings; anything else reads as false.
  usingProcessingTime = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, USE_PROCESSING_TIME ) );
  usingLogAppendTime = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, USE_LOG_APPEND_TIME ) );
  usingCreateTime = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, USE_CREATE_TIME ) );
  restrictedToCommitted = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, RESTRICT_TO_COMMITTED ) );
  allowingCommitOnConsumedOffset = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, ALLOW_COMMIT_ON_CONSUMED ) );

  // Rebuild the list of extra Kafka configuration options from the nested nodes.
  configOptions = new ArrayList<>();
  Node optionsNode = XMLHandler.getSubNode( stepnode, CONFIG_OPTIONS );
  for ( Node node : XMLHandler.getNodes( optionsNode, CONFIG_OPTION ) ) {
    String param = XMLHandler.getTagValue( node, CONFIG_OPTION_PARAMETER );
    String value = XMLHandler.getTagValue( node, CONFIG_OPTION_VALUE );
    ConfigOption.Type type = ConfigOption.Type.getTypeFromName( XMLHandler.getTagValue( node, CONFIG_OPTION_TYPE ) );
    configOptions.add( new ConfigOption( param, value, type ) );
  }
}
 
Example #2
Source Project: pentaho-kettle   Author: pentaho   File: DatabaseMetaStoreUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Loads every database connection stored under the Pentaho default namespace
 * of the given metastore. Returns an empty list when the database-connection
 * element type has not been registered yet.
 */
public static List<DatabaseMeta> getDatabaseElements( IMetaStore metaStore ) throws MetaStoreException {
  List<DatabaseMeta> result = new ArrayList<DatabaseMeta>();

  IMetaStoreElementType type =
    metaStore.getElementTypeByName(
      PentahoDefaults.NAMESPACE, PentahoDefaults.DATABASE_CONNECTION_ELEMENT_TYPE_NAME );
  if ( type == null ) {
    // No element type registered means there can be no stored connections.
    return result;
  }

  for ( IMetaStoreElement element : metaStore.getElements( PentahoDefaults.NAMESPACE, type ) ) {
    try {
      result.add( loadDatabaseMetaFromDatabaseElement( metaStore, element ) );
    } catch ( Exception e ) {
      // Surface which element failed; a single bad element aborts the whole load.
      throw new MetaStoreException( "Unable to load database from element with name '"
        + element.getName() + "' and type '" + type.getName() + "'", e );
    }
  }

  return result;
}
 
Example #3
Source Project: pentaho-kettle   Author: pentaho   File: JobEntrySQLDialog.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Copy information from the meta-data input to the dialog fields.
 */
/**
 * Copy information from the meta-data input to the dialog fields.
 */
public void getData() {
  wName.setText( Const.nullToEmpty( jobEntry.getName() ) );
  wSQL.setText( Const.nullToEmpty( jobEntry.getSQL() ) );

  // Show the connection name only when a named connection is configured.
  DatabaseMeta db = jobEntry.getDatabase();
  wConnection.setText( db != null && db.getName() != null ? db.getName() : "" );

  wUseSubs.setSelection( jobEntry.getUseVariableSubstitution() );
  wSQLFromFile.setSelection( jobEntry.getSQLFromFile() );
  wSendOneStatement.setSelection( jobEntry.isSendOneStatement() );
  wFilename.setText( Const.nullToEmpty( jobEntry.getSQLFilename() ) );

  // Put the cursor in the name field, ready for editing.
  wName.selectAll();
  wName.setFocus();
}
 
Example #4
Source Project: pentaho-kettle   Author: pentaho   File: ExecSQLRowMeta.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Restores this step's configuration from the repository.
 */
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  this.databasesList = databases;
  try {
    databaseMeta = rep.loadDatabaseMetaFromStepAttribute( id_step, "id_connection", databases );
    commitSize = (int) rep.getStepAttributeInteger( id_step, "commit" );
    sqlField = rep.getStepAttributeString( id_step, "sql_field" );
    insertField = rep.getStepAttributeString( id_step, "insert_field" );
    updateField = rep.getStepAttributeString( id_step, "update_field" );
    deleteField = rep.getStepAttributeString( id_step, "delete_field" );
    readField = rep.getStepAttributeString( id_step, "read_field" );
    sqlFromfile = rep.getStepAttributeBoolean( id_step, "sqlFromfile" );

    // Older repositories lack the "sendOneStatement" attribute entirely;
    // default to true in that case, otherwise use the stored boolean.
    String oneStatement = rep.getStepAttributeString( id_step, "sendOneStatement" );
    sendOneStatement =
      Utils.isEmpty( oneStatement ) || rep.getStepAttributeBoolean( id_step, "sendOneStatement" );

  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "ExecSQLRowMeta.Exception.UnexpectedErrorReadingStepInfo" ), e );
  }
}
 
Example #5
Source Project: pentaho-kettle   Author: pentaho   File: JobEntryColumnsExist.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Reads this job entry's configuration from its XML node.
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );

    // Target table/schema and the connection to check them against.
    tablename = XMLHandler.getTagValue( entrynode, "tablename" );
    schemaname = XMLHandler.getTagValue( entrynode, "schemaname" );
    connection = DatabaseMeta.findDatabase( databases, XMLHandler.getTagValue( entrynode, "connection" ) );

    // Column names are stored as <fields><field><name>...</name></field>...</fields>.
    Node fields = XMLHandler.getSubNode( entrynode, "fields" );
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );
    for ( int i = 0; i < nrFields; i++ ) {
      arguments[i] = XMLHandler.getTagValue( XMLHandler.getSubNodeByNr( fields, "field", i ), "name" );
    }

  } catch ( KettleException e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryColumnsExist.Meta.UnableLoadXml" ), e );
  }
}
 
Example #6
Source Project: pentaho-kafka-consumer   Author: RuckusWirelessIL   File: KafkaConsumerMeta.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public void readRep(Repository rep, IMetaStore metaStore, ObjectId stepId, List<DatabaseMeta> databases)
        throws KettleException {
    // Restores this step's configuration from the repository.
    try {
        topic = rep.getStepAttributeString(stepId, ATTR_TOPIC);
        field = rep.getStepAttributeString(stepId, ATTR_FIELD);
        keyField = rep.getStepAttributeString(stepId, ATTR_KEY_FIELD);
        limit = rep.getStepAttributeString(stepId, ATTR_LIMIT);
        timeout = rep.getStepAttributeString(stepId, ATTR_TIMEOUT);
        stopOnEmptyTopic = rep.getStepAttributeBoolean(stepId, ATTR_STOP_ON_EMPTY_TOPIC);
        String kafkaPropsXML = rep.getStepAttributeString(stepId, ATTR_KAFKA);
        if (kafkaPropsXML != null) {
            // Use an explicit charset: the no-arg String.getBytes() depends on the
            // platform default and can corrupt non-ASCII property values before
            // loadFromXML decodes the stream (Properties XML is UTF-8 by default).
            kafkaProperties.loadFromXML(new ByteArrayInputStream(
                    kafkaPropsXML.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
        }
        // Support old versions: individual per-name attributes predate the
        // single XML blob and take precedence when present.
        for (String name : KAFKA_PROPERTIES_NAMES) {
            String value = rep.getStepAttributeString(stepId, name);
            if (value != null) {
                kafkaProperties.put(name, value);
            }
        }
    } catch (Exception e) {
        throw new KettleException("KafkaConsumerMeta.Exception.loadRep", e);
    }
}
 
Example #7
Source Project: pentaho-kettle   Author: pentaho   File: PGBulkLoader.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Truncates the target table before loading, when the configured load action
 * (after variable substitution) is "truncate". No-op for any other action.
 *
 * @throws Exception when the TRUNCATE statement fails; the SQL error is wrapped
 *                   in a KettleException naming the table
 */
void processTruncate() throws Exception {
  Connection connection = data.db.getConnection();

  String loadAction = environmentSubstitute( meta.getLoadAction() );

  if ( loadAction.equalsIgnoreCase( "truncate" ) ) {
    DatabaseMeta dm = meta.getDatabaseMeta();
    // Schema/table are quoted by the database dialect, not string-concatenated raw.
    String tableName =
      dm.getQuotedSchemaTableCombination( environmentSubstitute( meta.getSchemaName() ),
        environmentSubstitute( meta.getTableName() ) );
    logBasic( "Launching command: " + "TRUNCATE " + tableName );

    // try-with-resources: the statement is always closed, and a failure in
    // close() can no longer mask the KettleException thrown for the real error.
    try ( Statement statement = connection.createStatement() ) {
      statement.executeUpdate( "TRUNCATE " + tableName );
    } catch ( Exception ex ) {
      throw new KettleException( "Error while truncating " + tableName, ex );
    }
  }
}
 
Example #8
Source Project: pentaho-kettle   Author: pentaho   File: JobEntrySyslog.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Reads this Syslog job entry's configuration from its XML node.
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );

    // Syslog server coordinates.
    serverName = XMLHandler.getTagValue( entrynode, "servername" );
    port = XMLHandler.getTagValue( entrynode, "port" );

    // Message settings.
    facility = XMLHandler.getTagValue( entrynode, "facility" );
    priority = XMLHandler.getTagValue( entrynode, "priority" );
    message = XMLHandler.getTagValue( entrynode, "message" );
    datePattern = XMLHandler.getTagValue( entrynode, "datePattern" );

    // Boolean flags are persisted as "Y"/"N".
    addTimestamp = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "addTimestamp" ) );
    addHostname = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "addHostname" ) );

  } catch ( KettleXMLException e ) {
    throw new KettleXMLException( "Unable to load job entry of type 'Syslog' from XML node", e );
  }
}
 
Example #9
Source Project: pentaho-kettle   Author: pentaho   File: JobGeneratorTest.java    License: Apache License 2.0 6 votes vote down vote up
@Before
public void setUp() throws Exception {
  // Star domain whose target-database property names the test connection.
  final StarDomain starDomain = mock( StarDomain.class );

  final Domain domain = mock( Domain.class );
  when( domain.getProperty( eq( DefaultIDs.DOMAIN_TARGET_DATABASE ) ) ).thenReturn( "test_domain_target_db" );
  when( starDomain.getDomain() ).thenReturn( domain );

  final Repository repository = mock( Repository.class );
  final RepositoryDirectoryInterface targetDirectory = mock( RepositoryDirectoryInterface.class );

  // A database whose name matches the domain's target-database property.
  // Use the statically imported mock()/when() consistently with the mocks above.
  final DatabaseMeta meta = mock( DatabaseMeta.class );
  when( meta.getName() ).thenReturn( "test_domain_target_db" );
  // Plain list instead of double-brace initialization: the anonymous subclass
  // created by "new LinkedList<>() {{ ... }}" keeps a hidden reference to the
  // enclosing test instance and creates a needless extra class.
  final LinkedList<DatabaseMeta> databases = new LinkedList<DatabaseMeta>();
  databases.add( meta );

  final String locale = Locale.US.toString();

  jobGenerator = new JobGenerator( starDomain, repository, targetDirectory, databases, locale );
}
 
Example #10
Source Project: pentaho-metadata   Author: pentaho   File: AdvancedMQLQueryImplIT.java    License: GNU Lesser General Public License v2.1 6 votes vote down vote up
public void testAliasedJoin() throws Exception {

    // Model provides business columns bc1 and bc3; bc1 will be selected both
    // directly and under an alias to force a second join path.
    BusinessModel model = getDefaultModel();
    BusinessColumn bc1 = model.findBusinessColumn( "bc1" ); //$NON-NLS-1$
    BusinessColumn bc3 = model.findBusinessColumn( "bc3" ); //$NON-NLS-1$

    // "ORACLE"/"Native" connection definition with all remaining fields blank;
    // only the dialect matters for SQL generation here.
    DatabaseMeta databaseMeta = new DatabaseMeta( "", "ORACLE", "Native", "", "", "", "", "" ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$ //$NON-NLS-6$ //$NON-NLS-7$
    AdvancedMQLQuery myTest = new AdvancedMQLQuery( null, model, databaseMeta, "en_US" );

    // Select bc1 unaliased and as "alias1" (plus a formula over the alias);
    // the expected SQL shows the alias materializes duplicate physical tables
    // (bt1_alias1, bt2_alias1) with their own join conditions.
    myTest.addSelection( new AdvancedMQLQuery.AliasedSelection( bc1, null ) );
    myTest.addSelection( new AdvancedMQLQuery.AliasedSelection( bc1, "alias1" ) );
    myTest.addSelection( new AdvancedMQLQuery.AliasedSelection( bc3, null ) );
    myTest.addSelection( new AdvancedMQLQuery.AliasedSelection( "[alias1.bc1] * 3" ) );

    // Constraints against the aliased column and a plain column.
    myTest.addConstraint( "AND", "[alias1.bc1] > 10" );
    myTest.addConstraint( "AND", "[bt3.bc3] > 10" );

    // SQLQueryTest.printOutJava(myTest.getQuery().getQuery());
    assertEqualsIgnoreWhitespaces( "SELECT DISTINCT " + "bt1.pc1 AS COL0 ," + "bt1_alias1.pc1 AS COL1 ,"
        + "bt3.pc3 AS COL2 , " + "bt1_alias1.pc1 * 3 AS COL3 " + "FROM " + "pt1 bt1 ," + "pt2 bt2 ," + "pt3 bt3 ,"
        + "pt1 bt1_alias1 ," + "pt2 bt2_alias1 " + "WHERE " + "( bt1.pc1 = bt2.pc2 ) " + "AND ( bt3.pc3 = bt2.pc2 ) "
        + "AND ( bt1_alias1.pc1 = bt2_alias1.pc2 ) " + "AND ( bt3.pc3 = bt2_alias1.pc2 ) "
        + "AND (( bt1_alias1.pc1 > 10 ) " + "AND ( bt3.pc3 > 10 ))", myTest.getQuery().getQuery() );
  }
 
Example #11
Source Project: pentaho-kettle   Author: pentaho   File: XMLJoinMeta.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Restores the XML-join step configuration from the repository.
 */
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
  throws KettleException {
  try {
    // Source / target step and field coordinates.
    targetXMLstep = rep.getStepAttributeString( id_step, "targetXMLstep" );
    targetXMLfield = rep.getStepAttributeString( id_step, "targetXMLfield" );
    sourceXMLstep = rep.getStepAttributeString( id_step, "sourceXMLstep" );
    sourceXMLfield = rep.getStepAttributeString( id_step, "sourceXMLfield" );

    // Join behaviour.
    targetXPath = rep.getStepAttributeString( id_step, "targetXPath" );
    complexJoin = rep.getStepAttributeBoolean( id_step, "complexJoin" );
    joinCompareField = rep.getStepAttributeString( id_step, "joinCompareField" );
    valueXMLfield = rep.getStepAttributeString( id_step, "valueXMLfield" );

    // Output options.
    encoding = rep.getStepAttributeString( id_step, "encoding" );
    omitXMLHeader = rep.getStepAttributeBoolean( id_step, "omitXMLHeader" );
    omitNullValues = rep.getStepAttributeBoolean( id_step, "omitNullValues" );

  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
 
Example #12
Source Project: pentaho-kettle   Author: pentaho   File: EditConnectionListenerTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void replaceSharedConnectionDoesNotExecuted_for_nonshared_connection() {
  // Given a connection created as NOT shared (the boolean flag), selecting the
  // edit widget must not trigger shared-connection replacement.
  dialog.transMeta.addDatabase( createDefaultDatabase( false ) );
  editConnectionListener.widgetSelected( null );

  // replaceSharedConnection(...) must never be invoked for a non-shared connection.
  verify( editConnectionListener, never() ).replaceSharedConnection( any( DatabaseMeta.class ), any(
      DatabaseMeta.class ) );
}
 
Example #13
Source Project: pentaho-kettle   Author: pentaho   File: PurRepository_DatabaseNames_IT.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void getDatabaseId_ExactMatch() throws Exception {
  // Save a database under a unique random name, then look it up by that exact
  // name: the returned id must match the saved object's id.
  final String name = UUID.randomUUID().toString();
  DatabaseMeta db = saveDatabase( name );

  ObjectId id = purRepository.getDatabaseID( name );
  assertEquals( db.getObjectId(), id );
}
 
Example #14
Source Project: pentaho-kettle   Author: pentaho   File: AddSequenceMeta.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public DatabaseMeta[] getUsedDatabaseConnections() {
  // Report this step's single connection when one is configured,
  // otherwise fall back to the base-class answer.
  return database == null ? super.getUsedDatabaseConnections() : new DatabaseMeta[] { database };
}
 
Example #15
Source Project: pentaho-kettle   Author: pentaho   File: TransDependency.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds a transformation dependency from its XML node, resolving the
 * referenced connection by name against the known databases.
 */
public TransDependency( Node depnode, List<DatabaseMeta> databases ) throws KettleXMLException {
  try {
    String connectionName = XMLHandler.getTagValue( depnode, "connection" );
    db = DatabaseMeta.findDatabase( databases, connectionName );

    tablename = XMLHandler.getTagValue( depnode, "table" );
    fieldname = XMLHandler.getTagValue( depnode, "field" );
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "TransDependency.Exception.UnableToLoadTransformation" ), e );
  }
}
 
Example #16
Source Project: pentaho-kettle   Author: pentaho   File: JobEntryFileExists.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Restores the single setting of this job entry — the file name to check —
 * from the repository.
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    filename = rep.getJobEntryAttributeString( id_jobentry, "filename" );
  } catch ( KettleException e ) {
    // Wrap with a localized message that names the failing job entry id.
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntryFileExists.ERROR_0002_Cannot_Load_Job_From_Repository", id_jobentry ), e );
  }
}
 
Example #17
Source Project: pentaho-kettle   Author: pentaho   File: TableOutputMeta.java    License: Apache License 2.0 5 votes vote down vote up
public DatabaseMeta[] getUsedDatabaseConnections() {
  // One connection when configured; otherwise defer to the base class.
  return databaseMeta == null ? super.getUsedDatabaseConnections() : new DatabaseMeta[] { databaseMeta };
}
 
Example #18
Source Project: pentaho-kettle   Author: pentaho   File: MonetDBBulkLoader.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Resolves connection settings from the step metadata and delegates to the
 * host/port/credentials overload of executeSql.
 *
 * @param query the SQL statement to execute
 * @throws KettleException when no metadata is available or the configured port
 *                         is not a valid number
 */
protected void executeSql( String query ) throws Exception {
  if ( this.meta == null ) {
    throw new KettleException( "No metadata available to determine connection information from." );
  }
  DatabaseMeta dm = meta.getDatabaseMeta();
  String hostname = environmentSubstitute( Const.NVL( dm.getHostname(), "" ) );
  String portnum = environmentSubstitute( Const.NVL( dm.getDatabasePortNumberString(), "" ) );
  String user = environmentSubstitute( Const.NVL( dm.getUsername(), "" ) );
  String password = environmentSubstitute( Const.NVL( dm.getPassword(), "" ) );
  String db = environmentSubstitute( Const.NVL( dm.getDatabaseName(), "" ) );

  // Fail with a clear, contextual error instead of a bare NumberFormatException
  // when the port is empty or not numeric (e.g. an unresolved variable).
  final int port;
  try {
    port = Integer.parseInt( portnum );
  } catch ( NumberFormatException nfe ) {
    throw new KettleException( "Invalid database port number '" + portnum + "'", nfe );
  }

  executeSql( query, hostname, port, user, password, db );
}
 
Example #19
Source Project: pentaho-kettle   Author: pentaho   File: PrivateDatabasesTestTemplate.java    License: Apache License 2.0 5 votes vote down vote up
// Builds a SharedObjects whose internal map contains exactly the given
// databases (or nothing when none / null are passed), for use as a test fixture.
@SuppressWarnings( "unchecked" )
protected SharedObjects createFakeSharedObjects( DatabaseMeta... shared ) throws Exception {
  SharedObjects fake = new SharedObjects();
  // Raw Map on purpose: we bypass the real key type of the objects map.
  Map map = fake.getObjectsMap();
  map.clear();

  if ( shared != null ) {
    // hacky solution: the keys are irrelevant for these tests, so any
    // unique object serves as a key for each database entry
    for ( DatabaseMeta meta : shared ) {
      map.put( new Object(), meta );
    }
  }

  return fake;
}
 
Example #20
Source Project: pentaho-kettle   Author: pentaho   File: JobLogTable.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds the default job log table layout: the standard set of log fields plus
 * the markers for key, date, status, errors and name fields.
 */
public static JobLogTable getDefault( VariableSpace space, HasDatabasesInterface databasesInterface ) {
  JobLogTable table = new JobLogTable( space, databasesInterface );

  // Field list: id, enabled-by-default, column name, i18n key suffix, type, length.
  // The name/description message keys always share the same suffix, so the
  // repetitive LogTableField construction is factored into addField() below.
  addField( table, ID.ID_JOB.id, true, "ID_JOB", "BatchID", ValueMetaInterface.TYPE_INTEGER, 8 );
  addField( table, ID.CHANNEL_ID.id, true, "CHANNEL_ID", "ChannelID", ValueMetaInterface.TYPE_STRING, 255 );
  addField( table, ID.JOBNAME.id, true, "JOBNAME", "JobName", ValueMetaInterface.TYPE_STRING, 255 );
  addField( table, ID.STATUS.id, true, "STATUS", "Status", ValueMetaInterface.TYPE_STRING, 15 );
  addField( table, ID.LINES_READ.id, true, "LINES_READ", "LinesRead", ValueMetaInterface.TYPE_INTEGER, 18 );
  addField( table, ID.LINES_WRITTEN.id, true, "LINES_WRITTEN", "LinesWritten", ValueMetaInterface.TYPE_INTEGER, 18 );
  addField( table, ID.LINES_UPDATED.id, true, "LINES_UPDATED", "LinesUpdated", ValueMetaInterface.TYPE_INTEGER, 18 );
  addField( table, ID.LINES_INPUT.id, true, "LINES_INPUT", "LinesInput", ValueMetaInterface.TYPE_INTEGER, 18 );
  addField( table, ID.LINES_OUTPUT.id, true, "LINES_OUTPUT", "LinesOutput", ValueMetaInterface.TYPE_INTEGER, 18 );
  addField( table, ID.LINES_REJECTED.id, true, "LINES_REJECTED", "LinesRejected", ValueMetaInterface.TYPE_INTEGER, 18 );
  addField( table, ID.ERRORS.id, true, "ERRORS", "Errors", ValueMetaInterface.TYPE_INTEGER, 18 );
  addField( table, ID.STARTDATE.id, true, "STARTDATE", "StartDateRange", ValueMetaInterface.TYPE_DATE, -1 );
  addField( table, ID.ENDDATE.id, true, "ENDDATE", "EndDateRange", ValueMetaInterface.TYPE_DATE, -1 );
  addField( table, ID.LOGDATE.id, true, "LOGDATE", "LogDate", ValueMetaInterface.TYPE_DATE, -1 );
  addField( table, ID.DEPDATE.id, true, "DEPDATE", "DepDate", ValueMetaInterface.TYPE_DATE, -1 );
  addField( table, ID.REPLAYDATE.id, true, "REPLAYDATE", "ReplayDate", ValueMetaInterface.TYPE_DATE, -1 );
  addField( table, ID.LOG_FIELD.id, true, "LOG_FIELD", "LogField", ValueMetaInterface.TYPE_STRING, DatabaseMeta.CLOB_LENGTH );
  addField( table, ID.EXECUTING_SERVER.id, false, "EXECUTING_SERVER", "ExecutingServer", ValueMetaInterface.TYPE_STRING, 255 );
  addField( table, ID.EXECUTING_USER.id, false, "EXECUTING_USER", "ExecutingUser", ValueMetaInterface.TYPE_STRING, 255 );
  addField( table, ID.START_JOB_ENTRY.id, false, "START_JOB_ENTRY", "StartingJobEntry", ValueMetaInterface.TYPE_STRING, 255 );
  addField( table, ID.CLIENT.id, false, "CLIENT", "Client", ValueMetaInterface.TYPE_STRING, 255 );

  // Mark the special-purpose fields.
  table.findField( ID.ID_JOB ).setKey( true );
  table.findField( ID.LOGDATE ).setLogDateField( true );
  table.findField( ID.LOG_FIELD ).setLogField( true );
  table.findField( ID.CHANNEL_ID ).setVisible( false );
  table.findField( ID.JOBNAME ).setVisible( false );
  table.findField( ID.STATUS ).setStatusField( true );
  table.findField( ID.ERRORS ).setErrorsField( true );
  table.findField( ID.JOBNAME ).setNameField( true );

  return table;
}

/**
 * Adds one default log field; subject is always false, and the name/description
 * are resolved from "JobLogTable.FieldName.<key>" / "JobLogTable.FieldDescription.<key>".
 */
private static void addField( JobLogTable table, String id, boolean enabled, String fieldName, String i18nKey,
  int type, int length ) {
  table.fields.add( new LogTableField( id, enabled, false, fieldName,
    BaseMessages.getString( PKG, "JobLogTable.FieldName." + i18nKey ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription." + i18nKey ), type, length ) );
}
 
Example #21
Source Project: pentaho-kettle   Author: pentaho   File: PaloCubeDeleteDialog.java    License: Apache License 2.0 5 votes vote down vote up
private void ok() {
  // Push the dialog values back onto the job entry, then close.
  jobEntry.setName( textStepName.getText() );
  DatabaseMeta selected = DatabaseMeta.findDatabase( jobMeta.getDatabases(), addConnectionLine.getText() );
  jobEntry.setDatabaseMeta( selected );
  jobEntry.setCubeName( comboCubeName.getText() );

  dispose();
}
 
Example #22
Source Project: pentaho-kettle   Author: pentaho   File: SynchronizeAfterMergeDialog.java    License: Apache License 2.0 5 votes vote down vote up
// Connects to the selected database, lets the user pick a schema from a sorted
// selection dialog, and writes the choice into the schema field.
private void getSchemaNames() {
  DatabaseMeta databaseMeta = transMeta.findDatabase( wConnection.getText() );
  if ( databaseMeta != null ) {
    Database database = new Database( loggingObject, databaseMeta );
    try {
      database.connect();
      String[] schemas = database.getSchemas();

      if ( null != schemas && schemas.length > 0 ) {
        // Sort for display and let the user pick one.
        schemas = Const.sortStrings( schemas );
        EnterSelectionDialog dialog =
          new EnterSelectionDialog( shell, schemas, BaseMessages.getString(
            PKG, "SynchronizeAfterMergeDialog.AvailableSchemas.Title", wConnection.getText() ), BaseMessages
            .getString( PKG, "SynchronizeAfterMergeDialog.AvailableSchemas.Message", wConnection.getText() ) );
        String d = dialog.open();
        if ( d != null ) {
          // A schema was chosen: update the field and refresh the table combo.
          wSchema.setText( Const.NVL( d, "" ) );
          setTableFieldCombo();
        }

      } else {
        // No schemas available on this connection: tell the user.
        MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
        mb.setMessage( BaseMessages.getString( PKG, "SynchronizeAfterMergeDialog.NoSchema.Error" ) );
        mb.setText( BaseMessages.getString( PKG, "SynchronizeAfterMergeDialog.GetSchemas.Error" ) );
        mb.open();
      }
    } catch ( Exception e ) {
      new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.Error.Title" ), BaseMessages
        .getString( PKG, "SynchronizeAfterMergeDialog.ErrorGettingSchemas" ), e );
    } finally {
      // Always release the connection, even after errors.
      database.disconnect();
    }
  }
}
 
Example #23
Source Project: pentaho-kettle   Author: pentaho   File: DatabaseLookupMeta.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  // Reset the key/return field state before (re)loading, then delegate the
  // actual XML parsing to readData().
  streamKeyField1 = null;
  returnValueField = null;

  readData( stepnode, databases );
}
 
Example #24
Source Project: pentaho-kettle   Author: pentaho   File: CombinationLookupMeta.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public DatabaseMeta[] getUsedDatabaseConnections() {
  // Report this step's single connection when one is configured,
  // otherwise fall back to the base-class answer.
  return databaseMeta == null ? super.getUsedDatabaseConnections() : new DatabaseMeta[] { databaseMeta };
}
 
Example #25
Source Project: pentaho-kettle   Author: pentaho   File: CreateDatabaseWizardPage2.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Applies the credential fields to the database meta and returns it.
 * Blank fields are ignored so existing values are not overwritten.
 */
public DatabaseMeta getDatabaseInfo() {
  String username = wUsername.getText();
  if ( username != null && !username.isEmpty() ) {
    databaseMeta.setUsername( username );
  }

  String password = wPassword.getText();
  if ( password != null && !password.isEmpty() ) {
    databaseMeta.setPassword( password );
  }

  // Enable the test button only when this database type supports testing.
  wTest.setEnabled( ( (BaseDatabaseMeta) databaseMeta.getDatabaseInterface() ).canTest() );

  return databaseMeta;
}
 
Example #26
Source Project: pentaho-kettle   Author: pentaho   File: CreateDatabaseWizardPageInformix.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Applies the Informix server name field to the connection info and returns it.
 * A blank field is ignored so the existing value is kept.
 */
public DatabaseMeta getDatabaseInfo() {
  String servername = wServername.getText();
  if ( servername != null && !servername.isEmpty() ) {
    info.setServername( servername );
  }

  return info;
}
 
Example #27
Source Project: pentaho-kettle   Author: pentaho   File: SpoonJobDelegate.java    License: Apache License 2.0 5 votes vote down vote up
// Adds a note to the transformation describing the i-th ripped table and the
// source/target connections involved.
private void setTransMetaNote( DatabaseMeta sourceDbInfo, DatabaseMeta targetDbInfo, String[] tables, int i,
                               TransMeta transMeta ) {
  StringBuilder note = new StringBuilder();
  note.append( BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Note1" ) )
    .append( tables[i] )
    .append( BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Note2" ) )
    .append( sourceDbInfo ).append( "]" ).append( Const.CR );
  note.append( BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Note3" ) )
    .append( tables[i] )
    .append( BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Note4" ) )
    .append( targetDbInfo ).append( "]" );

  NotePadMeta ni = new NotePadMeta( note.toString(), 150, 10, -1, -1 );
  transMeta.addNote( ni );
}
 
Example #28
Source Project: pentaho-kettle   Author: pentaho   File: JobEntryFTPSPUT.java    License: Apache License 2.0 5 votes vote down vote up
// Restores this FTPS-put job entry's settings from the repository: server
// coordinates, transfer options and proxy settings.
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    // Server coordinates and credentials; the password may be stored encrypted.
    serverName = rep.getJobEntryAttributeString( id_jobentry, "servername" );
    serverPort = rep.getJobEntryAttributeString( id_jobentry, "serverport" );
    userName = rep.getJobEntryAttributeString( id_jobentry, "username" );
    password =
      Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "password" ) );
    // Transfer settings.
    remoteDirectory = rep.getJobEntryAttributeString( id_jobentry, "remoteDirectory" );
    localDirectory = rep.getJobEntryAttributeString( id_jobentry, "localDirectory" );
    wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
    binaryMode = rep.getJobEntryAttributeBoolean( id_jobentry, "binary" );
    timeout = (int) rep.getJobEntryAttributeInteger( id_jobentry, "timeout" );
    remove = rep.getJobEntryAttributeBoolean( id_jobentry, "remove" );
    onlyPuttingNewFiles = rep.getJobEntryAttributeBoolean( id_jobentry, "only_new" );
    activeConnection = rep.getJobEntryAttributeBoolean( id_jobentry, "active" );

    // Proxy settings.
    proxyHost = rep.getJobEntryAttributeString( id_jobentry, "proxy_host" );
    proxyPort = rep.getJobEntryAttributeString( id_jobentry, "proxy_port" );
    proxyUsername = rep.getJobEntryAttributeString( id_jobentry, "proxy_username" );
    proxyPassword = rep.getJobEntryAttributeString( id_jobentry, "proxy_password" );
    // A missing connection type attribute maps to the default ("" code).
    connectionType =
      FTPSConnection.getConnectionTypeByCode( Const.NVL( rep.getJobEntryAttributeString(
        id_jobentry, "connection_type" ), "" ) );
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobFTPSPUT.UnableToLoadFromRepo", String
      .valueOf( id_jobentry ) ), dbe );
  }
}
 
Example #29
Source Project: pentaho-kettle   Author: pentaho   File: SQLFileOutputMeta.java    License: Apache License 2.0 5 votes vote down vote up
// Parses this step's settings from its XML node: connection/table target,
// SQL-generation options, and the output-file settings nested under <file>.
private void readData( Node stepnode, List<? extends SharedObjectInterface> databases ) throws KettleXMLException {
  try {

    // Resolve the named connection against the shared databases.
    String con = XMLHandler.getTagValue( stepnode, "connection" );
    databaseMeta = DatabaseMeta.findDatabase( databases, con );
    schemaName = XMLHandler.getTagValue( stepnode, "schema" );
    tablename = XMLHandler.getTagValue( stepnode, "table" );
    // Boolean options are persisted as "Y"/"N".
    truncateTable = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "truncate" ) );
    createTable = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "create" ) );
    encoding = XMLHandler.getTagValue( stepnode, "encoding" );
    dateformat = XMLHandler.getTagValue( stepnode, "dateformat" );
    AddToResult = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "AddToResult" ) );

    StartNewLine = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "StartNewLine" ) );

    // Output file settings, nested under the <file> element.
    fileName = XMLHandler.getTagValue( stepnode, "file", "name" );
    createparentfolder =
      "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "create_parent_folder" ) );
    // NOTE: "extention" is the historical (misspelled) tag name persisted in
    // existing transformations — it must not be "corrected" here.
    extension = XMLHandler.getTagValue( stepnode, "file", "extention" );
    fileAppended = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "append" ) );
    stepNrInFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "split" ) );
    partNrInFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "haspartno" ) );
    dateInFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "add_date" ) );
    timeInFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "add_time" ) );
    splitEvery = Const.toInt( XMLHandler.getTagValue( stepnode, "file", "splitevery" ), 0 );
    DoNotOpenNewFileInit =
      "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "DoNotOpenNewFileInit" ) );

  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
 
Example #30
Source Project: pentaho-kettle   Author: pentaho   File: SAPConnectionParamsHelper.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds SAP connection parameters from a Kettle connection definition,
 * resolving any Kettle variables embedded in the individual settings.
 */
public static SAPConnectionParams getFromDatabaseMeta( DatabaseMeta sapConnection ) {
  String name = sapConnection.getName();
  String host = sapConnection.environmentSubstitute( sapConnection.getHostname() );
  // SAP-specific settings are kept as extra attributes on the connection.
  String sysnr = sapConnection.environmentSubstitute( sapConnection.getAttributes().getProperty( "SAPSystemNumber" ) );
  String client = sapConnection.environmentSubstitute( sapConnection.getAttributes().getProperty( "SAPClient" ) );
  String user = sapConnection.environmentSubstitute( sapConnection.getUsername() );
  String password = sapConnection.environmentSubstitute( sapConnection.getPassword() );
  // The language is not configured on the connection; pass it empty.
  return new SAPConnectionParams( name, host, sysnr, client, user, password, "" );
}