Java Code Examples for org.pentaho.di.trans.TransMeta#setRepositoryDirectory()

The following examples show how to use org.pentaho.di.trans.TransMeta#setRepositoryDirectory(). You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: TransMetaConverterTest.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Test
public void testIncludesSubTransformationsFromRepository() throws Exception {
  // Parent transformation on disk references a sub-transformation by repository path.
  TransMeta parentTransMeta = new TransMeta( getClass().getResource( "trans-meta-converter-parent.ktr" ).getPath() );
  Repository repository = mock( Repository.class );
  TransMeta transMeta = new TransMeta();
  RepositoryDirectoryInterface repositoryDirectory = new RepositoryDirectory( null, "public" );
  // Resolve the "public" directory and the sub-transformation through the mocked repository.
  when( repository.findDirectory( "public" ) ).thenReturn( repositoryDirectory );
  when( repository.loadTransformation( "trans-meta-converter-sub.ktr", repositoryDirectory, null, true, null ) ).thenReturn( transMeta );
  parentTransMeta.setRepository( repository );
  parentTransMeta.setRepositoryDirectory( repositoryDirectory );
  parentTransMeta.setVariable( "Internal.Entry.Current.Directory", "public" );
  Transformation transformation = TransMetaConverter.convert( parentTransMeta );

  // The converter should collect exactly the one repository-resolved sub-transformation,
  // keyed by its repository path.
  @SuppressWarnings( { "unchecked", "ConstantConditions" } )
  HashMap<String, Transformation> config =
    (HashMap<String, Transformation>) transformation.getConfig( TransMetaConverter.SUB_TRANSFORMATIONS_KEY ).get();
  assertEquals( 1, config.size() );
  assertNotNull( config.get( "public/trans-meta-converter-sub.ktr" ) );
}
 
Example 2
Source File: SpoonJobDelegate.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@VisibleForTesting
void setTransMetaFileNaming( RepositoryDirectoryInterface repdir, String directory,
                                     DatabaseMeta sourceDbInfo, DatabaseMeta targetDbInfo, String[] tables, int i,
                                     TransMeta transMeta ) {
  // Compose the transformation name from the localized prefix/infix plus the
  // source database, the table being ripped, and the target database.
  String prefix = BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Transname1" );
  String infix = BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Transname2" );
  String transname = prefix + sourceDbInfo + "].[" + tables[i] + infix + targetDbInfo + "]";

  // Repository-backed transformations are addressed by directory + name;
  // file-based ones get a filename derived from the target directory instead.
  if ( repdir == null ) {
    String filename = Const.createFilename( directory, transname, "." + Const.STRING_TRANS_DEFAULT_EXT );
    transMeta.setFilename( filename );
  } else {
    transMeta.setRepositoryDirectory( repdir );
    transMeta.setName( transname );
  }
}
 
Example 3
Source File: PurRepository.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Assembles a fully-populated {@link TransMeta} from a repository file plus its serialized
 * node data and revision information.
 *
 * @param file      the repository file backing the transformation (supplies title, name,
 *                  description and object id)
 * @param parentDir the repository directory the transformation lives in
 * @param data      the serialized transformation content to deserialize into the meta
 * @param revision  the object revision to record on the resulting meta
 * @return the assembled transformation metadata, with its changed-flag cleared
 * @throws KettleException if shared objects cannot be read or the node data cannot be converted
 */
private TransMeta buildTransMeta( final RepositoryFile file, final RepositoryDirectoryInterface parentDir,
                                  final NodeRepositoryFileData data, final ObjectRevision revision )
  throws KettleException {
  TransMeta transMeta = new TransMeta();
  transMeta.setName( file.getTitle() );
  transMeta.setFilename( file.getName() );
  transMeta.setDescription( file.getDescription() );
  transMeta.setObjectId( new StringObjectId( file.getId().toString() ) );
  transMeta.setObjectRevision( revision );
  transMeta.setRepository( this );
  transMeta.setRepositoryDirectory( parentDir );
  transMeta.setMetaStore( getMetaStore() );
  // NOTE(review): shared objects are loaded before the node data is deserialized —
  // presumably so steps can resolve databases/slave servers during conversion; confirm
  // before reordering.
  readTransSharedObjects( transMeta ); // This should read from the local cache
  transDelegate.dataNodeToElement( data.getNode(), transMeta );
  // Freshly-loaded meta should not be flagged as modified.
  transMeta.clearChanged();
  return transMeta;
}
 
Example 4
Source File: GetRepositoryNamesTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a temp-directory-backed file repository and populates it with
 * /subdir1/{Trans1,Job1} and /subdir1/subdir2/{Trans2,Job2}.
 */
private static void prepareFileRepository() throws IOException, KettleException {
  baseDirName = Files.createTempDirectory( "GetRepositoryNamesIT" );
  RepositoryMeta repoMeta =
      new KettleFileRepositoryMeta( UUID.randomUUID().toString(), UUID.randomUUID().toString(),
          UUID.randomUUID().toString(), baseDirName.toString() );
  repo = new KettleFileRepository();
  repo.init( repoMeta );
  repo.connect( null, null );

  // Populate
  RepositoryDirectoryInterface rootDir = repo.findDirectory( "/" );

  RepositoryDirectoryInterface subdir1 = new RepositoryDirectory( rootDir, "subdir1" );
  repo.saveRepositoryDirectory( subdir1 );
  saveTransIn( "Trans1", subdir1 );
  saveJobIn( "Job1", subdir1 );

  RepositoryDirectoryInterface subdir2 = new RepositoryDirectory( subdir1, "subdir2" );
  repo.saveRepositoryDirectory( subdir2 );
  saveTransIn( "Trans2", subdir2 );
  saveJobIn( "Job2", subdir2 );
}

/** Saves an empty transformation with the given name into the given repository directory. */
private static void saveTransIn( String name, RepositoryDirectoryInterface dir ) throws KettleException {
  TransMeta transMeta = new TransMeta();
  transMeta.setName( name );
  transMeta.setRepositoryDirectory( dir );
  repo.save( transMeta, null, null );
}

/** Saves an empty job with the given name into the given repository directory. */
private static void saveJobIn( String name, RepositoryDirectoryInterface dir ) throws KettleException {
  JobMeta jobMeta = new JobMeta();
  jobMeta.setName( name );
  jobMeta.setRepositoryDirectory( dir );
  repo.save( jobMeta, null, null );
}
 
Example 5
Source File: SharedObjectSyncUtilTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a TransMeta with a random name/filename and a mocked repository directory
 * whose name getter/setter behave like the real implementation.
 *
 * @return the prepared transformation meta, also registered as Spoon's active meta
 * @throws KettleException if shared-objects initialization fails
 */
private TransMeta createTransMeta() throws KettleException {
  TransMeta transMeta = new TransMeta();
  transMeta.setName( UUID.randomUUID().toString() );
  transMeta.setFilename( UUID.randomUUID().toString() );
  // Mock the directory, but let setName/getName run for real so the name round-trips.
  RepositoryDirectory repositoryDirectory = mock( RepositoryDirectory.class );
  doCallRealMethod().when( repositoryDirectory ).setName( anyString() );
  doCallRealMethod().when( repositoryDirectory ).getName();
  transMeta.setRepositoryDirectory( repositoryDirectory );
  initSharedObjects( transMeta, SHARED_OBJECTS_FILE );
  // Make the mocked Spoon report this transformation as the active one.
  when( spoon.getActiveMeta() ).thenReturn( transMeta );
  return transMeta;
}
 
Example 6
Source File: PurRepository_RepositoryDirectory_IT.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Before
public void setup() throws Exception {

  // Save a minimal transformation named "Test" into the repository's default save directory.
  transMeta = new TransMeta();
  transMeta.setName( "Test" );
  transMeta.setRepositoryDirectory( purRepository.getDefaultSaveDirectory( transMeta ) );
  // Fixed timestamp (epoch) keeps the saved version deterministic across runs.
  final Calendar date = Calendar.getInstance();
  date.setTimeInMillis( 0 );
  purRepository.save( transMeta, "test", date, null, false );
  createPurRepository();

  // Create "test dir" under the default save directory, then re-fetch that directory —
  // presumably so the cached tree reflects the newly created child; confirm before removing.
  defaultSaveDirectory = purRepository.getDefaultSaveDirectory( transMeta );
  purRepository.createRepositoryDirectory( defaultSaveDirectory, "test dir" );
  defaultSaveDirectory = purRepository.getDefaultSaveDirectory( transMeta );
}
 
Example 7
Source File: RepositoryUnitIT.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * This test is to ensure that the metadata for the GetXMLData step is preserved when saving to a
 * repository. The test creates a GetXMLData step and saves it to the repository. Then the local
 * data is changed and the step is read back in from the repository. It is then asserted that the
 * field value(s) are equal to what was saved.
 *
 * Test method for
 * {@link org.pentaho.di.trans.steps.getxmldata.GetXMLDataMeta#readRep(org.pentaho.di.repository.Repository,
 * org.pentaho.di.repository.ObjectId, java.util.List, java.util.Map)}
 * . Test method for
 * {@link org.pentaho.di.trans.steps.getxmldata.GetXMLDataMeta#saveRep(org.pentaho.di.repository.Repository,
 * org.pentaho.di.repository.ObjectId, org.pentaho.di.repository.ObjectId)}
 * .
 */
@Test
public void testGetXMLDataMetaSaveAndReadRep() {

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "getxmldata1" );

  //
  // Create a Get XML Data step
  //
  String getXMLDataName = "get xml data step";
  GetXMLDataMeta gxdm = new GetXMLDataMeta();

  String getXMLDataPid = registry.getPluginId( StepPluginType.class, gxdm );
  StepMeta getXMLDataStep = new StepMeta( getXMLDataPid, getXMLDataName, gxdm );
  transMeta.addStep( getXMLDataStep );

  // Single input field configured as a SINGLE_NODE result — the value this test
  // verifies survives the repository round trip.
  GetXMLDataField[] fields = { new GetXMLDataField() };

  fields[0].setName( "objectid" );
  fields[0].setXPath( "ObjectID" );
  fields[0].setElementType( GetXMLDataField.ELEMENT_TYPE_NODE );
  fields[0].setResultType( GetXMLDataField.RESULT_TYPE_TYPE_SINGLE_NODE );
  fields[0].setType( ValueMetaInterface.TYPE_STRING );
  fields[0].setFormat( "" );
  fields[0].setLength( -1 );
  fields[0].setPrecision( -1 );
  fields[0].setCurrencySymbol( "" );
  fields[0].setDecimalSymbol( "" );
  fields[0].setGroupSymbol( "" );
  fields[0].setTrimType( GetXMLDataField.TYPE_TRIM_NONE );

  gxdm.setDefault();
  gxdm.setEncoding( "UTF-8" );
  gxdm.setIsAFile( false );
  gxdm.setInFields( true );
  gxdm.setLoopXPath( "/" );
  gxdm.setXMLField( "field1" );
  gxdm.setInputFields( fields );

  try {
    // Now save the transformation and then read it back in
    transMeta.setRepository( repository );
    RepositoryDirectoryInterface repositoryDirectory = repository.findDirectory( "/" );
    transMeta.setRepositoryDirectory( repositoryDirectory );
    repository.transDelegate.saveTransformation( transMeta, "None", null, true );

    // Create a new placeholder meta and set the result type to something different than what was
    // saved, to ensure the saveRep code is working correctly.
    GetXMLDataMeta newMeta = (GetXMLDataMeta) gxdm.clone();
    for ( GetXMLDataField f : newMeta.getInputFields() ) {
      f.setResultType( GetXMLDataField.RESULT_TYPE_VALUE_OF );
    }
    newMeta.readRep( repository, new MemoryMetaStore(), getXMLDataStep.getObjectId(), repository.getDatabases() );

    // Check that the value of Result Type is what was saved in the repo.
    // (JUnit convention: expected value first, actual second.)
    assertEquals( "singlenode", newMeta.getInputFields()[0].getResultTypeCode() );

  } catch ( KettleException e ) {
    fail( "Test failed due to exception: " + e.getLocalizedMessage() );
  }
}
 
Example 8
Source File: KettleDatabaseRepositoryIT.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Stores every non-HL7 sample transformation in the given repository directory and
 * verifies that its XML representation is stable across repeated save/load cycles.
 */
protected void verifyTransformationSamples( RepositoryDirectoryInterface samplesDirectory ) throws Exception {
  // Gather the sample .ktr files (skipping HL7 ones) in a deterministic order.
  File samplesFolder = new File( "samples/transformations/" );
  String[] sampleFiles = samplesFolder.list( ( dir, name ) -> name.endsWith( ".ktr" ) && !name.contains( "HL7" ) );
  Arrays.sort( sampleFiles );

  for ( String sampleFile : sampleFiles ) {
    String transFilename = samplesFolder.getAbsolutePath() + "/" + sampleFile;
    System.out.println( "Storing/Loading/validating transformation '" + transFilename + "'" );

    // Load the sample from disk; nameless samples get a name derived from the file.
    TransMeta transMeta = new TransMeta( transFilename );
    if ( Utils.isEmpty( transMeta.getName() ) ) {
      transMeta.setName( Const.createName( sampleFile ) );
    }

    // Store it in the repository under the samples folder.
    transMeta.setRepositoryDirectory( samplesDirectory );
    repository.save( transMeta, "unit testing", null, true );
    assertNotNull( transMeta.getObjectId() );

    // First round trip...
    TransMeta loadedTrans = repository.loadTransformation( transMeta.getObjectId(), null );
    String firstXml = loadedTrans.getXML();

    // ...then save & load once more; the XML must be identical both times.
    repository.save( transMeta, "unit testing", null, true );
    loadedTrans = repository.loadTransformation( transMeta.getObjectId(), null );
    String secondXml = loadedTrans.getXML();

    // storeFile(firstXml, "/tmp/one.ktr");
    // storeFile(secondXml, "/tmp/two.ktr");

    assertEquals( firstXml, secondXml );
  }

  // Finally check that the repository reports exactly the files we stored.
  System.out.println( "Stored "
    + sampleFiles.length + " transformation samples in folder " + samplesDirectory.getPath() );
  String[] transformationNames = repository.getTransformationNames( samplesDirectory.getObjectId(), false );
  assertEquals( sampleFiles.length, transformationNames.length );
}
 
Example 9
Source File: KettleFileRepositoryIT.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Stores every non-HL7 sample transformation in the given repository directory and
 * verifies that its XML representation is stable across repeated save/load cycles.
 */
private void verifyTransformationSamples( RepositoryDirectoryInterface samplesDirectory ) throws Exception {
  // Gather the sample .ktr files (skipping HL7 ones) in a deterministic order.
  File samplesFolder = new File( "samples/transformations/" );
  String[] sampleFiles = samplesFolder.list( ( dir, name ) -> name.endsWith( ".ktr" ) && !name.contains( "HL7" ) );
  Arrays.sort( sampleFiles );

  for ( String sampleFile : sampleFiles ) {
    String transFilename = samplesFolder.getAbsolutePath() + "/" + sampleFile;
    System.out.println( "Storing/Loading/validating transformation '" + transFilename + "'" );

    // Load the sample from disk and detach it from its on-disk filename.
    TransMeta transMeta = new TransMeta( transFilename );
    transMeta.setFilename( null );

    // The name is sometimes empty in the file, duplicates are present too...
    // Replaces slashes and the like as well...
    transMeta.setName( Const.createName( sampleFile ) );
    transMeta.setName( transMeta.getName().replace( '/', '-' ) );

    // Store it in the repository under the samples folder.
    transMeta.setRepositoryDirectory( samplesDirectory );
    repository.save( transMeta, "unit testing" );
    assertNotNull( transMeta.getObjectId() );

    // First round trip...
    TransMeta loadedTrans = repository.loadTransformation( transMeta.getObjectId(), null );
    String firstXml = loadedTrans.getXML();

    // ...then save & load once more; the XML must be identical both times.
    repository.save( transMeta, "unit testing" );
    loadedTrans = repository.loadTransformation( transMeta.getObjectId(), null );
    String secondXml = loadedTrans.getXML();

    // storeFile(firstXml, "/tmp/one.ktr");
    // storeFile(secondXml, "/tmp/two.ktr");

    assertEquals( firstXml, secondXml );
  }

  // Finally check that the repository reports exactly the files we stored.
  System.out.println( "Stored "
    + sampleFiles.length + " transformation samples in folder " + samplesDirectory.getPath() );
  String[] transformationNames = repository.getTransformationNames( samplesDirectory.getObjectId(), false );
  assertEquals( sampleFiles.length, transformationNames.length );
}
 
Example 10
Source File: JobEntryTrans.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * We're going to load the transformation meta data referenced here. Then we're going to give it a new filename,
 * modify that filename in this entries. The parent caller will have made a copy of it, so it should be OK to do so.
 * <p/>
 * Exports the object to a flat-file system, adding content with filename keys to a set of definitions. The supplied
 * resource naming interface allows the object to name appropriately without worrying about those parts of the
 * implementation specific details.
 *
 * @param space           The variable space to resolve (environment) variables with.
 * @param definitions     The map containing the filenames and content
 * @param namingInterface The resource naming interface allows the object to be named appropriately
 * @param repository      The repository to load resources from
 * @param metaStore       the metaStore to load external metadata from
 * @return The filename for this object. (also contained in the definitions map)
 * @throws KettleException in case something goes wrong during the export
 */
@Override
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
                               ResourceNamingInterface namingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  // Try to load the transformation from repository or file.
  // Modify this recursively too...
  //
  // AGAIN: there is no need to clone this job entry because the caller is responsible for this.
  //
  // First load the transformation metadata...
  //
  copyVariablesFrom( space );
  TransMeta transMeta = getTransMeta( repository, space );

  // Also go down into the transformation and export the files there. (mapping recursively down)
  //
  String proposedNewFilename =
    transMeta.exportResources( transMeta, definitions, namingInterface, repository, metaStore );

  // To get a relative path to it, we inject ${Internal.Entry.Current.Directory}
  //
  String newFilename = "${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}/" + proposedNewFilename;

  // Set the correct filename inside the XML.
  //
  transMeta.setFilename( newFilename );

  // exports always reside in the root directory, in case we want to turn this into a file repository...
  //
  transMeta.setRepositoryDirectory( new RepositoryDirectory() );

  // export to filename ALWAYS (this allows the exported XML to be executed remotely)
  //
  setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME );

  // change it in the job entry
  // NOTE: mutates this entry's own filename field so the exported job points at the new resource.
  //
  filename = newFilename;

  return proposedNewFilename;
}
 
Example 11
Source File: RepositoryImporter.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Imports a single transformation from its XML node into the repository, honoring
 * directory overrides, limiting directories, import rules and overwrite prompting.
 *
 * @param transnode
 *          The XML DOM node to read the transformation from
 * @param feedback
 *          callback used for progress labels, logging and overwrite prompts
 * @return false if the import should be canceled.
 * @throws KettleException
 *           in case there is an unexpected error
 */
protected boolean importTransformation( Node transnode, RepositoryImportFeedbackInterface feedback )
  throws KettleException {
  //
  // Load transformation from XML into a directory, possibly created!
  //
  TransMeta transMeta = createTransMetaForNode( transnode ); // ignore shared objects
  feedback.setLabel( BaseMessages.getString( PKG, "RepositoryImporter.ImportTrans.Label", Integer
      .toString( transformationNumber ), transMeta.getName() ) );

  // Enforce the configured import rules; throws if the element is rejected.
  validateImportedElement( importRules, transMeta );

  // What's the directory path?
  String directoryPath = Const.NVL( XMLHandler.getTagValue( transnode, "info", "directory" ), Const.FILE_SEPARATOR );
  if ( transDirOverride != null ) {
    directoryPath = transDirOverride;
  }

  if ( directoryPath.startsWith( "/" ) ) {
    // remove the leading root, we don't need it.
    directoryPath = directoryPath.substring( 1 );
  }

  // If we have a set of source directories to limit ourselves to, consider this.
  //
  if ( limitDirs.size() > 0 && Const.indexOfString( directoryPath, limitDirs ) < 0 ) {
    // Not in the limiting set of source directories, skip the import of this transformation...
    // (skipping still counts as success — return true to continue the import run)
    //
    feedback.addLog( BaseMessages.getString( PKG,
        "RepositoryImporter.SkippedTransformationNotPartOfLimitingDirectories.Log", transMeta.getName() ) );
    return true;
  }

  RepositoryDirectoryInterface targetDirectory = getTargetDirectory( directoryPath, transDirOverride, feedback );

  // OK, we loaded the transformation from XML and all went well...
  // See if the transformation already existed!
  ObjectId existingId = rep.getTransformationID( transMeta.getName(), targetDirectory );
  if ( existingId != null && askOverwrite ) {
    // Prompt the user; also refresh askOverwrite in case they chose "apply to all".
    overwrite = feedback.transOverwritePrompt( transMeta );
    askOverwrite = feedback.isAskingOverwriteConfirmation();
  } else {
    updateDisplay();
  }

  if ( existingId == null || overwrite ) {
    replaceSharedObjects( transMeta );
    // Reusing the existing id (or null for a new object) makes the save an update/insert.
    transMeta.setObjectId( existingId );
    transMeta.setRepositoryDirectory( targetDirectory );
    patchTransSteps( transMeta );

    try {
      // Keep info on who & when this transformation was created...
      if ( transMeta.getCreatedUser() == null || transMeta.getCreatedUser().equals( "-" ) ) {
        transMeta.setCreatedDate( new Date() );
        if ( rep.getUserInfo() != null ) {
          transMeta.setCreatedUser( rep.getUserInfo().getLogin() );
        } else {
          transMeta.setCreatedUser( null );
        }
      }
      saveTransMeta( transMeta );
      feedback.addLog( BaseMessages.getString( PKG, "RepositoryImporter.TransSaved.Log", Integer
          .toString( transformationNumber ), transMeta.getName() ) );

      // Remember objects with repository references so they can be re-resolved later.
      if ( transMeta.hasRepositoryReferences() ) {
        referencingObjects.add( new RepositoryObject( transMeta.getObjectId(), transMeta.getName(), transMeta
            .getRepositoryDirectory(), null, null, RepositoryObjectType.TRANSFORMATION, null, false ) );
      }

    } catch ( Exception e ) {
      // Saving failed: log it and let the user decide whether to continue the import run.
      feedback.addLog( BaseMessages.getString( PKG, "RepositoryImporter.ErrorSavingTrans.Log", Integer
          .toString( transformationNumber ), transMeta.getName(), Const.getStackTracker( e ) ) );

      if ( !feedback.askContinueOnErrorQuestion( BaseMessages.getString( PKG,
          "RepositoryImporter.DoYouWantToContinue.Title" ), BaseMessages.getString( PKG,
          "RepositoryImporter.DoYouWantToContinue.Message" ) ) ) {
        return false;
      }
    }
  } else {
    feedback.addLog( BaseMessages.getString( PKG, "RepositoryImporter.SkippedExistingTransformation.Log", transMeta
        .getName() ) );
  }
  return true;
}
 
Example 12
Source File: TransSplitter.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Creates a skeleton copy of the original transformation for either a slave server or
 * the master, carrying over cluster schemas, databases, logging and runtime settings.
 */
private TransMeta getOriginalCopy( boolean isSlaveTrans, ClusterSchema clusterSchema,
    SlaveServer slaveServer ) throws KettleException {
  TransMeta copy = new TransMeta();
  copy.setSlaveTransformation( true );

  if ( isSlaveTrans ) {
    // Slave copy: derive the slave-specific name, annotate it, and verify the
    // partitioning configuration for each reference step.
    copy.setName( getSlaveTransName( originalTransformation.getName(), clusterSchema, slaveServer ) );

    NotePadMeta slaveNote =
      new NotePadMeta( "This is a generated slave transformation.\nIt will be run on slave server: "
        + slaveServer, 0, 0, -1, -1 );
    copy.addNote( slaveNote );

    // add the slave partitioning schema's here.
    for ( StepMeta referenceStep : referenceSteps ) {
      verifySlavePartitioningConfiguration( copy, referenceStep, clusterSchema, slaveServer );
    }
  } else {
    // Master copy.
    copy.setName( originalTransformation.getName() + " (master)" );

    NotePadMeta masterNote =
      new NotePadMeta( "This is a generated master transformation.\nIt will be run on server: "
        + getMasterServer(), 0, 0, -1, -1 );
    copy.addNote( masterNote );
  }

  // Carry over the cluster schemas (cloned) and the shared database connections.
  for ( ClusterSchema schema : originalTransformation.getClusterSchemas() ) {
    copy.getClusterSchemas().add( schema.clone() );
  }
  copy.setDatabases( originalTransformation.getDatabases() );

  // Mirror the original's runtime settings: feedback, priority management,
  // unique connections, repository location, logging and rowset size.
  copy.setFeedbackShown( originalTransformation.isFeedbackShown() );
  copy.setFeedbackSize( originalTransformation.getFeedbackSize() );
  copy.setUsingThreadPriorityManagment( originalTransformation.isUsingThreadPriorityManagment() );
  copy.setUsingUniqueConnections( originalTransformation.isUsingUniqueConnections() );
  copy.setRepository( originalTransformation.getRepository() );
  copy.setRepositoryDirectory( originalTransformation.getRepositoryDirectory() );
  copy.setTransLogTable( (TransLogTable) originalTransformation.getTransLogTable().clone() );
  copy.setSizeRowset( originalTransformation.getSizeRowset() );

  return copy;
}
 
Example 13
Source File: MetaInject.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Writes the generated meta injection transformation to the repository. It is assumed that the repository
 * exists (user is connected).
 * @param targetFilePath the repo path to which to save the generated injection ktr
 * @throws KettleException
 */
/**
 * Writes the generated meta injection transformation to the repository. It is assumed that the repository
 * exists (user is connected).
 * @param targetFilePath the repo path to which to save the generated injection ktr
 * @throws KettleException if the repository lookup, directory creation or save fails
 */
private void writeInjectedKtrToRepo( final String targetFilePath ) throws KettleException {

  try {
    repoSaveLock.lock();

    // clone the transMeta associated with the data, this is the generated meta injection transformation
    final TransMeta generatedTrans = (TransMeta) data.transMeta.clone();
    // the targetFilePath holds the absolute repo path that is the requested destination of this generated
    // transformation, extract the file name (no extension) and the containing directory and adjust the generated
    // transformation properties accordingly
    List<String> targetPath = new ArrayList<>( Arrays.asList( Const.splitPath( targetFilePath,
      RepositoryDirectory.DIRECTORY_SEPARATOR ) ) );
    final String fileName = targetPath.get( targetPath.size() - 1 ).replace( ".ktr", "" );
    generatedTrans.setName( fileName );
    // remove the last targetPath element, so we're left with the target directory path
    targetPath.remove( targetPath.size() - 1 );
    if ( targetPath.size() > 0 ) {
      final String dirPath = String.join( RepositoryDirectory.DIRECTORY_SEPARATOR, targetPath );
      RepositoryDirectoryInterface directory = getRepository().findDirectory( dirPath );
      // if the directory does not exist, try to create it
      if ( directory == null ) {
        directory = getRepository().createRepositoryDirectory( new RepositoryDirectory( null, "/" ), dirPath );
      }
      generatedTrans.setRepositoryDirectory( directory );
    } else {
      // if the directory is null, set it to the directory of the cloned template ktr
      if ( log.isDebug() ) {
        log.logDebug( "The target injection ktr file path provided by the user is not a valid fully qualified "
          + "repository path - will store the generated ktr in the same directory as the template ktr: ",
          data.transMeta.getRepositoryDirectory() );
      }
      generatedTrans.setRepositoryDirectory( data.transMeta.getRepositoryDirectory() );
    }
    // set the objectId, in case the injected transformation already exists in the repo, so that it is updated in
    // the repository - the objectId will remain null if the transformation is being generated for the first time,
    // in which case a new ktr will be created in the repo
    generatedTrans.setObjectId( getRepository().getTransformationID( fileName, generatedTrans.getRepositoryDirectory() ) );
    getRepository().save( generatedTrans, null, null, true );

  } finally {
    repoSaveLock.unlock();
  }
}
 
Example 14
Source File: MetaInjectMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Exports the referenced executor transformation (and, recursively, its own resources) into
 * the definitions map, rewriting this step's filename to an
 * {@code ${Internal.Entry.Current.Directory}}-relative path.
 *
 * @param space                   the variable space to resolve (environment) variables with
 * @param definitions             the map receiving the exported filenames and content
 * @param resourceNamingInterface names exported resources appropriately
 * @param repository              the repository to load resources from
 * @param metaStore               the metaStore to load external metadata from
 * @return the proposed filename for the exported transformation
 * @throws KettleException if the executor transformation cannot be loaded or exported
 */
@Override
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
                               ResourceNamingInterface resourceNamingInterface, Repository repository,
                               IMetaStore metaStore )
  throws KettleException {
  try {
    // Try to load the transformation from repository or file.
    // Modify this recursively too...
    //
    // NOTE: there is no need to clone this step because the caller is
    // responsible for this.
    //
    // First load the executor transformation metadata...
    //
    TransMeta executorTransMeta = loadTransformationMeta( repository, space );

    // Also go down into the mapping transformation and export the files
    // there. (mapping recursively down)
    //
    String proposedNewFilename =
      executorTransMeta.exportResources( executorTransMeta, definitions, resourceNamingInterface, repository,
        metaStore );

    // To get a relative path to it, we inject
    // ${Internal.Entry.Current.Directory}
    //
    String newFilename =
      "${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}/" + proposedNewFilename;

    // Set the correct filename inside the XML.
    //
    executorTransMeta.setFilename( newFilename );

    // exports always reside in the root directory, in case we want to turn
    // this into a file repository...
    //
    executorTransMeta.setRepositoryDirectory( new RepositoryDirectory() );

    // change it in the entry
    //
    fileName = newFilename;

    setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME );

    return proposedNewFilename;
  } catch ( Exception e ) {
    // Chain the original exception so the root cause of the load failure is preserved.
    throw new KettleException( BaseMessages.getString( PKG, "MetaInjectMeta.Exception.UnableToLoadTrans",
      fileName ), e );
  }
}
 
Example 15
Source File: RepositoryTestBase.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Builds a TransMeta populated with every EXP_* expected value — description, version/status,
 * created/modified info, parameters, log tables, max-date settings, runtime flags, a dependency,
 * two steps with a hop, and slave/partition configuration — saving the supporting shared objects
 * (databases, slave server, partition schema) so they receive repository IDs. Serves as the
 * fixture for repository save/load round-trip tests.
 *
 * @param dbName suffix used to make object names unique per test
 * @return the fully-populated transformation meta
 * @throws Exception if any supporting object cannot be saved
 */
protected TransMeta createTransMeta( final String dbName ) throws Exception {
  RepositoryDirectoryInterface rootDir = loadStartDirectory();
  TransMeta transMeta = new TransMeta();
  transMeta.setName( EXP_TRANS_NAME.concat( dbName ) );
  transMeta.setDescription( EXP_TRANS_DESC );
  transMeta.setExtendedDescription( EXP_TRANS_EXTENDED_DESC );
  transMeta.setRepositoryDirectory( rootDir.findDirectory( DIR_TRANSFORMATIONS ) );
  transMeta.setTransversion( EXP_TRANS_VERSION );
  transMeta.setTransstatus( EXP_TRANS_STATUS );
  transMeta.setCreatedUser( EXP_TRANS_CREATED_USER );
  transMeta.setCreatedDate( EXP_TRANS_CREATED_DATE );
  transMeta.setModifiedUser( EXP_TRANS_MOD_USER );
  transMeta.setModifiedDate( EXP_TRANS_MOD_DATE );
  transMeta.addParameterDefinition( EXP_TRANS_PARAM_1_NAME, EXP_TRANS_PARAM_1_DEF, EXP_TRANS_PARAM_1_DESC );

  // TODO mlowery other transLogTable fields could be set for testing here
  TransLogTable transLogTable = TransLogTable.getDefault( transMeta, transMeta, new ArrayList<StepMeta>( 0 ) );
  transLogTable.setConnectionName( EXP_TRANS_LOG_TABLE_CONN_NAME );
  transLogTable.setLogInterval( EXP_TRANS_LOG_TABLE_INTERVAL );
  transLogTable.setSchemaName( EXP_TRANS_LOG_TABLE_SCHEMA_NAME );
  transLogTable.setLogSizeLimit( EXP_TRANS_LOG_TABLE_SIZE_LIMIT );
  transLogTable.setTableName( EXP_TRANS_LOG_TABLE_TABLE_NAME );
  transLogTable.setTimeoutInDays( EXP_TRANS_LOG_TABLE_TIMEOUT_IN_DAYS );
  transMeta.setTransLogTable( transLogTable );
  // TODO mlowery other perfLogTable fields could be set for testing here
  PerformanceLogTable perfLogTable = PerformanceLogTable.getDefault( transMeta, transMeta );
  perfLogTable.setConnectionName( EXP_TRANS_LOG_TABLE_CONN_NAME );
  perfLogTable.setLogInterval( EXP_TRANS_LOG_TABLE_INTERVAL );
  perfLogTable.setSchemaName( EXP_TRANS_LOG_TABLE_SCHEMA_NAME );
  perfLogTable.setTableName( EXP_TRANS_LOG_TABLE_TABLE_NAME );
  perfLogTable.setTimeoutInDays( EXP_TRANS_LOG_TABLE_TIMEOUT_IN_DAYS );
  transMeta.setPerformanceLogTable( perfLogTable );
  // TODO mlowery other channelLogTable fields could be set for testing here
  ChannelLogTable channelLogTable = ChannelLogTable.getDefault( transMeta, transMeta );
  channelLogTable.setConnectionName( EXP_TRANS_LOG_TABLE_CONN_NAME );
  channelLogTable.setSchemaName( EXP_TRANS_LOG_TABLE_SCHEMA_NAME );
  channelLogTable.setTableName( EXP_TRANS_LOG_TABLE_TABLE_NAME );
  channelLogTable.setTimeoutInDays( EXP_TRANS_LOG_TABLE_TIMEOUT_IN_DAYS );
  transMeta.setChannelLogTable( channelLogTable );
  // TODO mlowery other stepLogTable fields could be set for testing here
  StepLogTable stepLogTable = StepLogTable.getDefault( transMeta, transMeta );
  stepLogTable.setConnectionName( EXP_TRANS_LOG_TABLE_CONN_NAME );
  stepLogTable.setSchemaName( EXP_TRANS_LOG_TABLE_SCHEMA_NAME );
  stepLogTable.setTableName( EXP_TRANS_LOG_TABLE_TABLE_NAME );
  stepLogTable.setTimeoutInDays( EXP_TRANS_LOG_TABLE_TIMEOUT_IN_DAYS );
  transMeta.setStepLogTable( stepLogTable );
  DatabaseMeta dbMeta = createDatabaseMeta( dbName );
  // dbMeta must be saved so that it gets an ID
  repository.save( dbMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( dbMeta );
  // Max-date settings reference the saved database connection.
  transMeta.setMaxDateConnection( dbMeta );
  transMeta.setMaxDateTable( EXP_TRANS_MAX_DATE_TABLE );
  transMeta.setMaxDateField( EXP_TRANS_MAX_DATE_FIELD );
  transMeta.setMaxDateOffset( EXP_TRANS_MAX_DATE_OFFSET );
  transMeta.setMaxDateDifference( EXP_TRANS_MAX_DATE_DIFF );
  transMeta.setSizeRowset( EXP_TRANS_SIZE_ROWSET );
  transMeta.setSleepTimeEmpty( EXP_TRANS_SLEEP_TIME_EMPTY );
  transMeta.setSleepTimeFull( EXP_TRANS_SLEEP_TIME_FULL );
  transMeta.setUsingUniqueConnections( EXP_TRANS_USING_UNIQUE_CONN );
  transMeta.setFeedbackShown( EXP_TRANS_FEEDBACK_SHOWN );
  transMeta.setFeedbackSize( EXP_TRANS_FEEDBACK_SIZE );
  transMeta.setUsingThreadPriorityManagment( EXP_TRANS_USING_THREAD_PRIORITY_MGMT );
  transMeta.setSharedObjectsFile( EXP_TRANS_SHARED_OBJECTS_FILE );
  transMeta.setCapturingStepPerformanceSnapShots( EXP_TRANS_CAPTURE_STEP_PERF_SNAPSHOTS );
  transMeta.setStepPerformanceCapturingDelay( EXP_TRANS_STEP_PERF_CAP_DELAY );
  transMeta.addDependency( new TransDependency( dbMeta, EXP_TRANS_DEP_TABLE_NAME, EXP_TRANS_DEP_FIELD_NAME ) );
  // Two steps joined by a hop, backed by their own saved database connection.
  DatabaseMeta stepDbMeta = createDatabaseMeta( EXP_DBMETA_NAME_STEP.concat( dbName ) );
  repository.save( stepDbMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( stepDbMeta );
  Condition cond = new Condition();
  StepMeta step1 = createStepMeta1( transMeta, stepDbMeta, cond );
  transMeta.addStep( step1 );
  StepMeta step2 = createStepMeta2( stepDbMeta, cond );
  transMeta.addStep( step2 );
  transMeta.addTransHop( createTransHopMeta( step1, step2 ) );

  SlaveServer slaveServer = createSlaveServer( dbName );
  PartitionSchema partSchema = createPartitionSchema( dbName );
  // slaveServer, partSchema must be saved so that they get IDs
  repository.save( slaveServer, VERSION_COMMENT_V1, null );
  deleteStack.push( slaveServer );
  repository.save( partSchema, VERSION_COMMENT_V1, null );
  deleteStack.push( partSchema );

  SlaveStepCopyPartitionDistribution slaveStepCopyPartitionDistribution = new SlaveStepCopyPartitionDistribution();
  slaveStepCopyPartitionDistribution.addPartition( EXP_SLAVE_NAME, EXP_PART_SCHEMA_NAME, 0 );
  slaveStepCopyPartitionDistribution.setOriginalPartitionSchemas( Arrays
      .asList( new PartitionSchema[] { partSchema } ) );
  transMeta.setSlaveStepCopyPartitionDistribution( slaveStepCopyPartitionDistribution );
  transMeta.setSlaveTransformation( EXP_TRANS_SLAVE_TRANSFORMATION );
  return transMeta;
}
}