Java Code Examples for org.pentaho.di.repository.Repository#countNrJobEntryAttributes()

The following examples show how to use org.pentaho.di.repository.Repository#countNrJobEntryAttributes(). The method returns the number of attribute rows stored in the repository under a given attribute code for a job entry; job entry implementations typically call it in loadRep() to size their arrays before reading the indexed attributes back. The examples below are taken from open source projects, with the source file and originating project noted above each example.
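Before the examples, here is a minimal sketch (not taken from any of the projects below) of the save/load pattern this method supports: saveRep() writes one attribute row per array element under the same code with an increasing index, and loadRep() calls countNrJobEntryAttributes() to size the array before reading the rows back. The Repository calls and the JobEntryBase-style method signatures match what the examples below use; the class name and the "argument" attribute code are illustrative only.

import java.util.List;

import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;

public class JobEntryArgumentsSketch extends JobEntryBase {

  private String[] arguments;

  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
    // Each array element becomes one attribute row with the same code and an increasing index;
    // these are the rows that countNrJobEntryAttributes() counts at load time.
    if ( arguments != null ) {
      for ( int i = 0; i < arguments.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "argument", arguments[i] );
      }
    }
  }

  public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
    // Count the indexed rows first, size the array, then read each row back by index.
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "argument" );
    arguments = new String[argnr];
    for ( int a = 0; a < argnr; a++ ) {
      arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "argument" );
    }
  }
}
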
Example 1
Source File: JobEntryColumnsExist.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );

    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    arguments = new String[argnr];

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" );
    }

  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryColumnsExist.Meta.UnableLoadRep", ""
      + id_jobentry ), dbe );
  }
}
 
Example 2
Source File: JobEntryDeleteFiles.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    includeSubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );

    int numberOfArgs = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    allocate( numberOfArgs );

    for ( int i = 0; i < numberOfArgs; i++ ) {
      arguments[i] = rep.getJobEntryAttributeString( id_jobentry, i, "name" );
      filemasks[i] = rep.getJobEntryAttributeString( id_jobentry, i, "filemask" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFiles.UnableToLoadFromRepo", String
      .valueOf( id_jobentry ) ), dbe );
  }
}
 
Example 3
Source File: JobEntryXMLWellFormed.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  try {
    arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );

    nr_errors_less_than = rep.getJobEntryAttributeString( id_jobentry, "nr_errors_less_than" );
    success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );
    resultfilenames = rep.getJobEntryAttributeString( id_jobentry, "resultfilenames" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
    source_filefolder = new String[argnr];
    wildcard = new String[argnr];

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" );
      wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
    }
  } catch ( KettleException dbe ) {

    throw new KettleException( BaseMessages.getString( PKG, "JobXMLWellFormed.Error.Exception.UnableLoadRep" )
        + id_jobentry, dbe );
  }
}
 
Example 4
Source File: JobEntryAddResultFilenames.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    includeSubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );

    deleteallbefore = rep.getJobEntryAttributeBoolean( id_jobentry, "delete_all_before" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    arguments = new String[argnr];
    filemasks = new String[argnr];

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" );
      filemasks[a] = rep.getJobEntryAttributeString( id_jobentry, a, "filemask" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntryAddResultFilenames.UnableToLoadFromRepo", String.valueOf( id_jobentry ) ), dbe );
  }
}
 
Example 5
Source File: JobEntryCheckDbConnections.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    // How many connections?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "id_database" );
    connections = new DatabaseMeta[argnr];
    waitfors = new String[argnr];
    waittimes = new int[argnr];
    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      connections[a] =
        rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", a, "id_database", databases );
      waitfors[a] = rep.getJobEntryAttributeString( id_jobentry, a, "waitfor" );
      waittimes[a] =
        getWaitByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, a, "waittime" ), "" ) );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntryCheckDbConnections.ERROR_0002_Cannot_Load_Job_From_Repository", "" + id_jobentry, dbe
        .getMessage() ) );
  }
}
 
Example 6
Source File: JobEntryTruncateTables.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );

    this.argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    allocate( argnr );

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      this.arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" );
      this.schemaname[a] = rep.getJobEntryAttributeString( id_jobentry, a, "schemaname" );
    }

  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryTruncateTables.UnableLoadRep", ""
      + id_jobentry ), dbe );
  }
}
 
Example 7
Source File: JobEntryDeleteFolders.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    limit_folders = rep.getJobEntryAttributeString( id_jobentry, "limit_folders" );
    success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    allocate( argnr );

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFolders.UnableToLoadFromRepo", String
      .valueOf( id_jobentry ) ), dbe );
  }
}
 
Example 8
Source File: JobEntryCheckFilesLocked.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId idJobEntry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    argFromPrevious = rep.getJobEntryAttributeBoolean( idJobEntry, ARG_FROM_PREVIOUS_ATTR );
    includeSubfolders = rep.getJobEntryAttributeBoolean( idJobEntry, INCLUDE_SUBFOLDERS_ATTR );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( idJobEntry, NAME_ATTR );
    arguments = new String[argnr];
    filemasks = new String[argnr];

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      arguments[a] = rep.getJobEntryAttributeString( idJobEntry, a, NAME_ATTR );
      filemasks[a] = rep.getJobEntryAttributeString( idJobEntry, a, FILE_MASK_ATTR );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntryCheckFilesLocked.UnableToLoadFromRepo", String.valueOf( idJobEntry ) ), dbe );
  }
}
 
Example 9
Source File: JobEntryXSLT.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  try {
    xmlfilename = rep.getJobEntryAttributeString( id_jobentry, "xmlfilename" );
    xslfilename = rep.getJobEntryAttributeString( id_jobentry, "xslfilename" );
    outputfilename = rep.getJobEntryAttributeString( id_jobentry, "outputfilename" );
    iffileexists = (int) rep.getJobEntryAttributeInteger( id_jobentry, "iffileexists" );
    addfiletoresult = rep.getJobEntryAttributeBoolean( id_jobentry, "addfiletoresult" );
    filenamesfromprevious = rep.getJobEntryAttributeBoolean( id_jobentry, "filenamesfromprevious" );
    xsltfactory = rep.getJobEntryAttributeString( id_jobentry, "xsltfactory" );
    if ( xsltfactory == null ) {
      xsltfactory = FACTORY_JAXP;
    }

    int nrparams = rep.countNrJobEntryAttributes( id_jobentry, "param_name" );
    int nroutputprops = rep.countNrJobEntryAttributes( id_jobentry, "output_property_name" );
    allocate( nrparams, nroutputprops );

    for ( int i = 0; i < nrparams; i++ ) {
      parameterField[i] = rep.getJobEntryAttributeString( id_jobentry, i, "param_field" );
      parameterName[i] = rep.getJobEntryAttributeString( id_jobentry, i, "param_name" );
    }
    for ( int i = 0; i < nroutputprops; i++ ) {
      outputPropertyName[i] = rep.getJobEntryAttributeString( id_jobentry, i, "output_property_name" );
      outputPropertyValue[i] = rep.getJobEntryAttributeString( id_jobentry, i, "output_property_value" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'xslt' from the repository for id_jobentry="
        + id_jobentry, dbe );
  }
}
 
Example 10
Source File: JobEntryMSAccessBulkLoad.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
    add_result_filenames = rep.getJobEntryAttributeBoolean( id_jobentry, "add_result_filenames" );
    is_args_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "is_args_from_previous" );

    limit = rep.getJobEntryAttributeString( id_jobentry, "limit" );
    success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
    source_filefolder = new String[argnr];
    source_wildcard = new String[argnr];
    delimiter = new String[argnr];
    target_Db = new String[argnr];
    target_table = new String[argnr];

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" );
      source_wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_wildcard" );
      delimiter[a] = rep.getJobEntryAttributeString( id_jobentry, a, "delimiter" );
      target_Db[a] = rep.getJobEntryAttributeString( id_jobentry, a, "target_db" );
      target_table[a] = rep.getJobEntryAttributeString( id_jobentry, a, "target_table" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryMSAccessBulkLoad.Meta.UnableLoadRep", ""
      + id_jobentry, dbe.getMessage() ), dbe );
  }
}
 
Example 11
Source File: JobEntryCopyFiles.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  try {
    copy_empty_folders = rep.getJobEntryAttributeBoolean( id_jobentry, "copy_empty_folders" );
    arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    overwrite_files = rep.getJobEntryAttributeBoolean( id_jobentry, "overwrite_files" );
    include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
    remove_source_files = rep.getJobEntryAttributeBoolean( id_jobentry, "remove_source_files" );

    add_result_filesname = rep.getJobEntryAttributeBoolean( id_jobentry, "add_result_filesname" );
    destination_is_a_file = rep.getJobEntryAttributeBoolean( id_jobentry, "destination_is_a_file" );
    create_destination_folder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_destination_folder" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
    allocate( argnr );

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      source_filefolder[a] = loadSourceRep( rep, id_jobentry, a );
      destination_filefolder[a] = loadDestinationRep( rep, id_jobentry, a );
      wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.UnableLoadRep" )
      + id_jobentry, dbe );
  }
}
 
Example 12
Source File: JobEntryShell.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    setFileName( rep.getJobEntryAttributeString( id_jobentry, "file_name" ) );
    setWorkDirectory( rep.getJobEntryAttributeString( id_jobentry, "work_directory" ) );
    argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    execPerRow = rep.getJobEntryAttributeBoolean( id_jobentry, "exec_per_row" );

    setLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_logfile" );
    setAppendLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_append_logfile" );
    addDate = rep.getJobEntryAttributeBoolean( id_jobentry, "add_date" );
    addTime = rep.getJobEntryAttributeBoolean( id_jobentry, "add_time" );
    logfile = rep.getJobEntryAttributeString( id_jobentry, "logfile" );
    logext = rep.getJobEntryAttributeString( id_jobentry, "logext" );
    logFileLevel = LogLevel.getLogLevelForCode( rep.getJobEntryAttributeString( id_jobentry, "loglevel" ) );
    insertScript = rep.getJobEntryAttributeBoolean( id_jobentry, "insertScript" );

    script = rep.getJobEntryAttributeString( id_jobentry, "script" );
    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "argument" );
    allocate( argnr );

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "argument" );
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'shell' from the repository with id_jobentry="
      + id_jobentry, dbe );
  }
}
 
Example 13
Source File: CheckConnections.java    From knowbi-pentaho-pdi-neo4j-output with Apache License 2.0
@Override public void loadRep( Repository rep, IMetaStore metaStore, ObjectId jobEntryId, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException {
  connectionNames = new ArrayList<>();
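  // One attribute row is stored per connection; count them, then read each name back by index.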
  int nrConnections = rep.countNrJobEntryAttributes( jobEntryId, "connection" );
  for ( int i = 0; i < nrConnections; i++ ) {
    connectionNames.add( rep.getJobEntryAttributeString( jobEntryId, i, "connection" ) );
  }
}
 
Example 14
Source File: JobEntryEvalFilesMetrics.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
    allocate( argnr );

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      sourceFileFolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" );
      sourceWildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
      sourceIncludeSubfolders[a] = rep.getJobEntryAttributeString( id_jobentry, a, "include_subFolders" );
    }

    resultFilenamesWildcard = rep.getJobEntryAttributeString( id_jobentry, "result_filenames_wildcard" );
    ResultFieldFile = rep.getJobEntryAttributeString( id_jobentry, "result_field_file" );
    ResultFieldWildcard = rep.getJobEntryAttributeString( id_jobentry, "result_field_wild" );
    ResultFieldIncludesubFolders =
      rep.getJobEntryAttributeString( id_jobentry, "result_field_includesubfolders" );
    comparevalue = rep.getJobEntryAttributeString( id_jobentry, "comparevalue" );
    minvalue = rep.getJobEntryAttributeString( id_jobentry, "minvalue" );
    maxvalue = rep.getJobEntryAttributeString( id_jobentry, "maxvalue" );
    successConditionType =
      JobEntrySimpleEval.getSuccessNumberConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
        id_jobentry, "successnumbercondition" ), "" ) );
    sourceFiles =
      getSourceFilesByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "source_files" ), "" ) );
    evaluationType =
      getEvaluationTypeByCode( Const
        .NVL( rep.getJobEntryAttributeString( id_jobentry, "evaluation_type" ), "" ) );
    scale = getScaleByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "scale" ), "" ) );
  } catch ( KettleException dbe ) {

    throw new KettleException( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.Exception.UnableLoadRep" )
      + id_jobentry, dbe );
  }
}
 
Example 15
Source File: JobEntryDosToUnix.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );

    nr_errors_less_than = rep.getJobEntryAttributeString( id_jobentry, "nr_errors_less_than" );
    success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );
    resultfilenames = rep.getJobEntryAttributeString( id_jobentry, "resultfilenames" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
    allocate( argnr );

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" );
      wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
      conversionTypes[a] =
        getConversionTypeByCode( Const.NVL(
          rep.getJobEntryAttributeString( id_jobentry, "ConversionType" ), "" ) );
    }
  } catch ( KettleException dbe ) {

    throw new KettleException( BaseMessages.getString( PKG, "JobDosToUnix.Error.Exception.UnableLoadRep" )
      + id_jobentry, dbe );
  }
}
 
Example 16
Source File: JobEntryTrans.java    From pentaho-kettle with Apache License 2.0
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
                     List<SlaveServer> slaveServers ) throws KettleException {
  try {
    String method = rep.getJobEntryAttributeString( id_jobentry, "specification_method" );
    specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method );
    String transId = rep.getJobEntryAttributeString( id_jobentry, "trans_object_id" );
    transObjectId = Utils.isEmpty( transId ) ? null : new StringObjectId( transId );
    transname = rep.getJobEntryAttributeString( id_jobentry, "name" );
    directory = rep.getJobEntryAttributeString( id_jobentry, "dir_path" );
    filename = rep.getJobEntryAttributeString( id_jobentry, "file_name" );

    // Backward compatibility check for object specification
    //
    checkObjectLocationSpecificationMethod();

    argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    paramsFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "params_from_previous" );
    execPerRow = rep.getJobEntryAttributeBoolean( id_jobentry, "exec_per_row" );
    clearResultRows = rep.getJobEntryAttributeBoolean( id_jobentry, "clear_rows", true );
    clearResultFiles = rep.getJobEntryAttributeBoolean( id_jobentry, "clear_files", true );
    setLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_logfile" );
    addDate = rep.getJobEntryAttributeBoolean( id_jobentry, "add_date" );
    addTime = rep.getJobEntryAttributeBoolean( id_jobentry, "add_time" );
    logfile = rep.getJobEntryAttributeString( id_jobentry, "logfile" );
    logext = rep.getJobEntryAttributeString( id_jobentry, "logext" );
    logFileLevel = LogLevel.getLogLevelForCode( rep.getJobEntryAttributeString( id_jobentry, "loglevel" ) );
    clustering = rep.getJobEntryAttributeBoolean( id_jobentry, "cluster" );
    createParentFolder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_parent_folder" );

    remoteSlaveServerName = rep.getJobEntryAttributeString( id_jobentry, "slave_server_name" );
    setAppendLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_append_logfile" );
    waitingToFinish = rep.getJobEntryAttributeBoolean( id_jobentry, "wait_until_finished", true );
    followingAbortRemotely = rep.getJobEntryAttributeBoolean( id_jobentry, "follow_abort_remote" );
    loggingRemoteWork = rep.getJobEntryAttributeBoolean( id_jobentry, "logging_remote_work" );
    runConfiguration = rep.getJobEntryAttributeString( id_jobentry, "run_configuration" );
    setSuppressResultData( rep.getJobEntryAttributeBoolean( id_jobentry, "suppress_result_data", false ) );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "argument" );
    allocateArgs( argnr );

    // Read all arguments...
    for ( int a = 0; a < argnr; a++ ) {
      arguments[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "argument" );
    }

    // How many parameters?
    int parameternr = rep.countNrJobEntryAttributes( id_jobentry, "parameter_name" );
    allocateParams( parameternr );

    // Read all parameters ...
    for ( int a = 0; a < parameternr; a++ ) {
      parameters[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "parameter_name" );
      parameterFieldNames[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "parameter_stream_name" );
      parameterValues[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "parameter_value" );
    }

    passingAllParameters = rep.getJobEntryAttributeBoolean( id_jobentry, "pass_all_parameters", true );

  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'trans' from the repository for id_jobentry="
      + id_jobentry, dbe );
  }
}
 
Example 17
Source File: JobEntryHTTP.java    From pentaho-kettle with Apache License 2.0
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
                     List<SlaveServer> slaveServers ) throws KettleException {
  try {
    url = rep.getJobEntryAttributeString( id_jobentry, "url" );
    targetFilename = rep.getJobEntryAttributeString( id_jobentry, "targetfilename" );
    fileAppended = rep.getJobEntryAttributeBoolean( id_jobentry, "file_appended" );
    dateTimeAdded = rep.getJobEntryAttributeBoolean( id_jobentry, "date_time_added" );
    targetFilenameExtension = Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "targetfilename_extension" ),
      rep.getJobEntryAttributeString( id_jobentry, "targetfilename_extention" ) );

    uploadFilename = rep.getJobEntryAttributeString( id_jobentry, "uploadfilename" );

    urlFieldname = rep.getJobEntryAttributeString( id_jobentry, "url_fieldname" );
    uploadFieldname = rep.getJobEntryAttributeString( id_jobentry, "upload_fieldname" );
    destinationFieldname = rep.getJobEntryAttributeString( id_jobentry, "dest_fieldname" );
    runForEveryRow = rep.getJobEntryAttributeBoolean( id_jobentry, "run_every_row" );

    username = rep.getJobEntryAttributeString( id_jobentry, "username" );
    password =
      Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "password" ) );

    proxyHostname = rep.getJobEntryAttributeString( id_jobentry, "proxy_host" );
    proxyPort = rep.getJobEntryAttributeString( id_jobentry, "proxy_port" ); // backward compatible.

    nonProxyHosts = rep.getJobEntryAttributeString( id_jobentry, "non_proxy_hosts" );
    addfilenameresult =
      "Y".equalsIgnoreCase( Const
        .NVL( rep.getJobEntryAttributeString( id_jobentry, "addfilenameresult" ), "Y" ) );

    // How many headers?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "header_name" );
    allocate( argnr );

    for ( int a = 0; a < argnr; a++ ) {
      headerName[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "header_name" );
      headerValue[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "header_value" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'HTTP' from the repository for id_jobentry="
      + id_jobentry, dbe );
  }
}
 
Example 18
Source File: JobEntryMail.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    // First load the common parts like name & description, then the attributes...
    //
    server = rep.getJobEntryAttributeString( id_jobentry, "server" );
    port = rep.getJobEntryAttributeString( id_jobentry, "port" );
    destination = rep.getJobEntryAttributeString( id_jobentry, "destination" );
    destinationCc = rep.getJobEntryAttributeString( id_jobentry, "destinationCc" );
    destinationBCc = rep.getJobEntryAttributeString( id_jobentry, "destinationBCc" );
    replyAddress = rep.getJobEntryAttributeString( id_jobentry, "replyto" );
    replyName = rep.getJobEntryAttributeString( id_jobentry, "replytoname" );
    subject = rep.getJobEntryAttributeString( id_jobentry, "subject" );
    includeDate = rep.getJobEntryAttributeBoolean( id_jobentry, "include_date" );
    contactPerson = rep.getJobEntryAttributeString( id_jobentry, "contact_person" );
    contactPhone = rep.getJobEntryAttributeString( id_jobentry, "contact_phone" );
    comment = rep.getJobEntryAttributeString( id_jobentry, "comment" );
    encoding = rep.getJobEntryAttributeString( id_jobentry, "encoding" );
    priority = rep.getJobEntryAttributeString( id_jobentry, "priority" );
    importance = rep.getJobEntryAttributeString( id_jobentry, "importance" );
    sensitivity = rep.getJobEntryAttributeString( id_jobentry, "sensitivity" );
    includingFiles = rep.getJobEntryAttributeBoolean( id_jobentry, "include_files" );
    usingAuthentication = rep.getJobEntryAttributeBoolean( id_jobentry, "use_auth" );
    usingSecureAuthentication = rep.getJobEntryAttributeBoolean( id_jobentry, "use_secure_auth" );
    authenticationUser = rep.getJobEntryAttributeString( id_jobentry, "auth_user" );
    authenticationPassword =
      Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "auth_password" ) );
    onlySendComment = rep.getJobEntryAttributeBoolean( id_jobentry, "only_comment" );
    useHTML = rep.getJobEntryAttributeBoolean( id_jobentry, "use_HTML" );
    usePriority = rep.getJobEntryAttributeBoolean( id_jobentry, "use_Priority" );
    secureConnectionType = rep.getJobEntryAttributeString( id_jobentry, "secureconnectiontype" );

    int nrTypes = rep.countNrJobEntryAttributes( id_jobentry, "file_type" );
    allocate( nrTypes );

    for ( int i = 0; i < nrTypes; i++ ) {
      String typeCode = rep.getJobEntryAttributeString( id_jobentry, i, "file_type" );
      fileType[i] = ResultFile.getType( typeCode );
    }

    zipFiles = rep.getJobEntryAttributeBoolean( id_jobentry, "zip_files" );
    zipFilename = rep.getJobEntryAttributeString( id_jobentry, "zip_name" );
    replyToAddresses = rep.getJobEntryAttributeString( id_jobentry, "replyToAddresses" );

    // How many embedded images?
    int imagesnr = rep.countNrJobEntryAttributes( id_jobentry, "embeddedimage" );
    allocateImages( imagesnr );

    // Read them all...
    for ( int a = 0; a < imagesnr; a++ ) {
      embeddedimages[a] = rep.getJobEntryAttributeString( id_jobentry, a, "embeddedimage" );
      contentids[a] = rep.getJobEntryAttributeString( id_jobentry, a, "contentid" );
    }

  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'mail' from the repository with id_jobentry="
      + id_jobentry, dbe );
  }

}
 
Example 19
Source File: JobEntryJob.java    From pentaho-kettle with Apache License 2.0
/**
 * Load the jobentry from repository
 */
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    String method = rep.getJobEntryAttributeString( id_jobentry, "specification_method" );
    specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method );
    String jobId = rep.getJobEntryAttributeString( id_jobentry, "job_object_id" );
    jobObjectId = Utils.isEmpty( jobId ) ? null : new StringObjectId( jobId );
    jobname = rep.getJobEntryAttributeString( id_jobentry, "name" );
    directory = rep.getJobEntryAttributeString( id_jobentry, "dir_path" );
    filename = rep.getJobEntryAttributeString( id_jobentry, "file_name" );

    /*
     * when loaded from a repository and jobname is present use REPOSITORY_BY_NAME
     * if filename is not present and jobObjectId use REPOSITORY_BY_REFERENCE for backwards compatibility
     * if filename is present use FILENAME
     * if nothing else, default to REPOSITORY_BY_NAME
     *
     * no other options are supported
     */

    if ( !Utils.isEmpty( jobname ) ) {
      specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME;
    } else if ( jobObjectId != null ) {
      specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE;
    } else if ( !Utils.isEmpty( filename ) ) {
      specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
    } else {
      specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME;
    }

    argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    paramsFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "params_from_previous" );
    execPerRow = rep.getJobEntryAttributeBoolean( id_jobentry, "exec_per_row" );
    setLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_logfile" );
    addDate = rep.getJobEntryAttributeBoolean( id_jobentry, "add_date" );
    addTime = rep.getJobEntryAttributeBoolean( id_jobentry, "add_time" );
    logfile = rep.getJobEntryAttributeString( id_jobentry, "logfile" );
    logext = rep.getJobEntryAttributeString( id_jobentry, "logext" );
    logFileLevel = LogLevel.getLogLevelForCode( rep.getJobEntryAttributeString( id_jobentry, "loglevel" ) );
    setAppendLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_append_logfile" );
    remoteSlaveServerName = rep.getJobEntryAttributeString( id_jobentry, "slave_server_name" );
    passingExport = rep.getJobEntryAttributeBoolean( id_jobentry, "pass_export" );
    waitingToFinish = rep.getJobEntryAttributeBoolean( id_jobentry, "wait_until_finished", true );
    followingAbortRemotely = rep.getJobEntryAttributeBoolean( id_jobentry, "follow_abort_remote" );
    expandingRemoteJob = rep.getJobEntryAttributeBoolean( id_jobentry, "expand_remote_job" );
    createParentFolder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_parent_folder" );
    runConfiguration = rep.getJobEntryAttributeString( id_jobentry, "run_configuration" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "argument" );
    allocateArgs( argnr );

    // Read all arguments ...
    for ( int a = 0; a < argnr; a++ ) {
      arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "argument" );
    }

    // How many parameters?
    int parameternr = rep.countNrJobEntryAttributes( id_jobentry, "parameter_name" );
    allocateParams( parameternr );

    // Read all parameters ...
    for ( int a = 0; a < parameternr; a++ ) {
      parameters[a] = rep.getJobEntryAttributeString( id_jobentry, a, "parameter_name" );
      parameterFieldNames[a] = rep.getJobEntryAttributeString( id_jobentry, a, "parameter_stream_name" );
      parameterValues[a] = rep.getJobEntryAttributeString( id_jobentry, a, "parameter_value" );
    }

    passingAllParameters = rep.getJobEntryAttributeBoolean( id_jobentry, "pass_all_parameters", true );

  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'job' from the repository with id_jobentry="
      + id_jobentry, dbe );
  }
}
 
Example 20
Source File: JobEntryMoveFiles.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    move_empty_folders = rep.getJobEntryAttributeBoolean( id_jobentry, "move_empty_folders" );
    arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
    add_result_filesname = rep.getJobEntryAttributeBoolean( id_jobentry, "add_result_filesname" );
    destination_is_a_file = rep.getJobEntryAttributeBoolean( id_jobentry, "destination_is_a_file" );
    create_destination_folder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_destination_folder" );
    nr_errors_less_than = rep.getJobEntryAttributeString( id_jobentry, "nr_errors_less_than" );
    success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );
    add_date = rep.getJobEntryAttributeBoolean( id_jobentry, "add_date" );
    add_time = rep.getJobEntryAttributeBoolean( id_jobentry, "add_time" );
    SpecifyFormat = rep.getJobEntryAttributeBoolean( id_jobentry, "SpecifyFormat" );
    date_time_format = rep.getJobEntryAttributeString( id_jobentry, "date_time_format" );
    AddDateBeforeExtension = rep.getJobEntryAttributeBoolean( id_jobentry, "AddDateBeforeExtension" );
    DoNotKeepFolderStructure = rep.getJobEntryAttributeBoolean( id_jobentry, "DoNotKeepFolderStructure" );
    iffileexists = rep.getJobEntryAttributeString( id_jobentry, "iffileexists" );
    destinationFolder = rep.getJobEntryAttributeString( id_jobentry, "destinationFolder" );
    ifmovedfileexists = rep.getJobEntryAttributeString( id_jobentry, "ifmovedfileexists" );
    moved_date_time_format = rep.getJobEntryAttributeString( id_jobentry, "moved_date_time_format" );
    AddMovedDateBeforeExtension = rep.getJobEntryAttributeBoolean( id_jobentry, "AddMovedDateBeforeExtension" );
    create_move_to_folder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_move_to_folder" );
    add_moved_date = rep.getJobEntryAttributeBoolean( id_jobentry, "add_moved_date" );
    add_moved_time = rep.getJobEntryAttributeBoolean( id_jobentry, "add_moved_time" );
    SpecifyMoveFormat = rep.getJobEntryAttributeBoolean( id_jobentry, "SpecifyMoveFormat" );
    simulate = rep.getJobEntryAttributeBoolean( id_jobentry, "simulate" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
    allocate( argnr );

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" );
      destination_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "destination_filefolder" );
      wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
    }
  } catch ( KettleException dbe ) {

    throw new KettleException( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.UnableLoadRep" )
      + id_jobentry, dbe );
  }
}