Java Code Examples for org.pentaho.di.trans.TransMeta#setDatabases()

The following examples show how to use org.pentaho.di.trans.TransMeta#setDatabases(). You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: StreamToTransNodeConverterTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void filterPrivateDatabasesWithOnePrivateDatabaseTest() {
  // Exactly one of the dummy connections ("database2") is marked private,
  // so the filter is expected to keep exactly one database.
  IUnifiedRepository repositoryMock = mock( IUnifiedRepository.class );
  TransMeta meta = new TransMeta();
  meta.setDatabases( getDummyDatabases() );
  Set<String> privateNames = new HashSet<>();
  privateNames.add( "database2" );
  meta.setPrivateDatabases( privateNames );
  StreamToTransNodeConverter converter = new StreamToTransNodeConverter( repositoryMock );
  assertEquals( 1, converter.filterPrivateDatabases( meta ).getDatabases().size() );
}
 
Example 2
Source File: StreamToTransNodeConverterTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void filterPrivateDatabasesWithOnePrivateDatabaseAndOneInUseTest() {
  // One connection is flagged private ("database2") and a different one is
  // reported as in use, so the filter must retain two databases.
  IUnifiedRepository purMock = mock( IUnifiedRepository.class );
  TransMeta transMeta = spy( TransMeta.class );
  transMeta.setDatabases( getDummyDatabases() );
  Set<String> privateDatabases = new HashSet<>();
  privateDatabases.add( "database2" );
  transMeta.setPrivateDatabases( privateDatabases );
  // Use doReturn(..).when(spy) instead of when(spy.method()): the latter
  // invokes the real isDatabaseConnectionUsed() while stubbing, which is
  // the stubbing form Mockito documents for spies.
  doReturn( true ).when( transMeta ).isDatabaseConnectionUsed( getDummyDatabases().get( 0 ) );
  StreamToTransNodeConverter transConverter = new StreamToTransNodeConverter( purMock );
  assertEquals( 2, transConverter.filterPrivateDatabases( transMeta ).getDatabases().size() );
}
 
Example 3
Source File: StreamToTransNodeConverterTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void filterPrivateDatabasesWithOneInUseTest() {
  // No connection is private, but one is reported as in use, so the filter
  // must keep exactly that one database.
  IUnifiedRepository purMock = mock( IUnifiedRepository.class );
  TransMeta transMeta = spy( TransMeta.class );
  transMeta.setDatabases( getDummyDatabases() );
  transMeta.setPrivateDatabases( new HashSet<>() );
  // Use doReturn(..).when(spy) instead of when(spy.method()): the latter
  // invokes the real isDatabaseConnectionUsed() while stubbing, which is
  // the stubbing form Mockito documents for spies.
  doReturn( true ).when( transMeta ).isDatabaseConnectionUsed( getDummyDatabases().get( 0 ) );
  StreamToTransNodeConverter transConverter = new StreamToTransNodeConverter( purMock );
  assertEquals( 1, transConverter.filterPrivateDatabases( transMeta ).getDatabases().size() );
}
 
Example 4
Source File: StreamToTransNodeConverterTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void filterPrivateDatabasesNoPrivateDatabaseTest() {
  // With an empty private-database set and no connection in use,
  // every dummy connection is filtered out.
  IUnifiedRepository repositoryMock = mock( IUnifiedRepository.class );
  TransMeta meta = new TransMeta();
  meta.setDatabases( getDummyDatabases() );
  meta.setPrivateDatabases( new HashSet<>() );
  StreamToTransNodeConverter converter = new StreamToTransNodeConverter( repositoryMock );
  assertEquals( 0, converter.filterPrivateDatabases( meta ).getDatabases().size() );
}
 
Example 5
Source File: StreamToTransNodeConverterTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void filterPrivateDatabasesNullPrivateDatabaseTest() {
  // A null private-database set means "no restriction": the filtered
  // result keeps every connection of the original transformation.
  IUnifiedRepository repositoryMock = mock( IUnifiedRepository.class );
  TransMeta meta = new TransMeta();
  meta.setDatabases( getDummyDatabases() );
  meta.setPrivateDatabases( null );
  StreamToTransNodeConverter converter = new StreamToTransNodeConverter( repositoryMock );
  int expectedCount = meta.getDatabases().size();
  assertEquals( expectedCount, converter.filterPrivateDatabases( meta ).getDatabases().size() );
}
 
Example 6
Source File: TransSplitter.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Creates a fresh {@link TransMeta} seeded with the settings of the original
 * transformation, labelled either as a generated slave or master copy.
 *
 * @param isSlaveTrans  true to build the slave variant, false for the master
 * @param clusterSchema cluster schema used for naming and slave partitioning
 * @param slaveServer   target slave server (used for the slave variant only)
 * @return the configured copy
 * @throws KettleException if slave partitioning verification fails
 */
private TransMeta getOriginalCopy( boolean isSlaveTrans, ClusterSchema clusterSchema,
    SlaveServer slaveServer ) throws KettleException {
  TransMeta copy = new TransMeta();
  copy.setSlaveTransformation( true );

  if ( isSlaveTrans ) {
    copy.setName( getSlaveTransName( originalTransformation.getName(), clusterSchema, slaveServer ) );
    copy.addNote( new NotePadMeta( "This is a generated slave transformation.\nIt will be run on slave server: "
      + slaveServer, 0, 0, -1, -1 ) );

    // Register the slave partitioning schemas for every referenced step.
    for ( StepMeta referenceStep : referenceSteps ) {
      verifySlavePartitioningConfiguration( copy, referenceStep, clusterSchema, slaveServer );
    }
  } else {
    copy.setName( originalTransformation.getName() + " (master)" );
    copy.addNote( new NotePadMeta( "This is a generated master transformation.\nIt will be run on server: "
      + getMasterServer(), 0, 0, -1, -1 ) );
  }

  // Cluster schemas are cloned one by one; the database list is handed over
  // as-is (shared by reference with the original).
  for ( ClusterSchema schema : originalTransformation.getClusterSchemas() ) {
    copy.getClusterSchemas().add( schema.clone() );
  }
  copy.setDatabases( originalTransformation.getDatabases() );

  // Feedback settings
  copy.setFeedbackShown( originalTransformation.isFeedbackShown() );
  copy.setFeedbackSize( originalTransformation.getFeedbackSize() );

  // Thread priority management flag
  copy.setUsingThreadPriorityManagment( originalTransformation.isUsingThreadPriorityManagment() );

  // Unique-connection usage flag
  copy.setUsingUniqueConnections( originalTransformation.isUsingUniqueConnections() );

  // Repository linkage
  copy.setRepository( originalTransformation.getRepository() );
  copy.setRepositoryDirectory( originalTransformation.getRepositoryDirectory() );

  // Logging details (the log table is cloned, not shared)
  copy.setTransLogTable( (TransLogTable) originalTransformation.getTransLogTable().clone() );

  // Row set size
  copy.setSizeRowset( originalTransformation.getSizeRowset() );

  return copy;
}