Java Code Examples for org.pentaho.di.repository.Repository#saveJobEntryAttribute()

The following examples show how to use org.pentaho.di.repository.Repository#saveJobEntryAttribute(). All of them are taken from open-source projects; the source file and project are named above each example so you can consult the original code.
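Every example implements a job entry's saveRep( Repository, IMetaStore, ObjectId ) method, which Kettle calls when the entry is written to a repository. Two families of overloads appear throughout: a scalar form keyed only by an attribute code, and an indexed form that takes an additional int position for array-valued attributes (used in Examples 2, 3, 13, 18, 19 and 20). As a rough orientation, here is a sketch of the overloads as the examples use them; treat it as a hedged summary, not a verbatim copy of the Repository interface:

// Sketch of the saveJobEntryAttribute() overloads as used below (hedged, not verbatim).
// Scalar attributes: one value per attribute code, with String, boolean and long variants.
void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, String code, String value ) throws KettleException;
void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, String code, boolean value ) throws KettleException;
void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, String code, long value ) throws KettleException;
// Indexed attributes: one row per position, for array-valued settings.
void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ) throws KettleException;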
Example 1
Source File: JobEntryEvalTableContent.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );

    rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "success_condition", getSuccessConditionCode( successCondition ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "limit", limit );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "custom_sql", customSQL );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "is_custom_sql", useCustomSQL );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "is_usevars", useVars );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_rows_result", addRowsResult );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "clear_result_rows", clearResultList );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryEvalTableContent.UnableSaveRep", ""
      + id_job ), dbe );
  }
}
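The read side of this pattern lives in the entry's loadRep(...). Below is a minimal sketch of how the scalar attributes saved above would typically be read back, assuming the standard getter names on the same Repository interface (getJobEntryAttributeString / getJobEntryAttributeBoolean); the exact fields and message text are illustrative only.

// Hedged sketch: assumes the usual typed getters on Repository; not taken from the original file.
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry,
  List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException {
  try {
    // Scalar attributes are fetched by code; the typed getters mirror the save overloads.
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    customSQL = rep.getJobEntryAttributeString( id_jobentry, "custom_sql" );
    useCustomSQL = rep.getJobEntryAttributeBoolean( id_jobentry, "is_custom_sql" );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry attributes for id_jobentry=" + id_jobentry, dbe );
  }
}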
 
Example 2
Source File: JobEntrySetVariables.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "replacevars", replaceVars );

    rep.saveJobEntryAttribute( id_job, getObjectId(), "filename", filename );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "file_variable_type", getVariableTypeCode( fileVariableType ) );

    // save the variableName...
    if ( variableName != null ) {
      for ( int i = 0; i < variableName.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "variable_name", variableName[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "variable_value", variableValue[i] );
        rep.saveJobEntryAttribute(
          id_job, getObjectId(), i, "variable_type", getVariableTypeCode( variableType[i] ) );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntrySetVariables.Meta.UnableSaveRep", String
      .valueOf( id_job ), dbe.getMessage() ), dbe );
  }
}
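For the indexed attributes above, the corresponding loadRep(...) usually counts the stored rows first and then reads them back by position. A hedged sketch of that read-back loop (the counter countNrJobEntryAttributes and the indexed getter are assumed from the same Repository interface):

// Hedged sketch of the matching read-back in loadRep(); method names are assumptions.
int nrVariables = rep.countNrJobEntryAttributes( id_jobentry, "variable_name" );
variableName = new String[nrVariables];
variableValue = new String[nrVariables];
for ( int i = 0; i < nrVariables; i++ ) {
  // Each row is addressed by its position plus the attribute code used in saveRep().
  variableName[i] = rep.getJobEntryAttributeString( id_jobentry, i, "variable_name" );
  variableValue[i] = rep.getJobEntryAttributeString( id_jobentry, i, "variable_value" );
}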
 
Example 3
Source File: JobEntryDeleteFolders.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", argFromPrevious );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "limit_folders", limit_folders );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "success_condition", success_condition );

    // save the arguments...
    if ( arguments != null ) {
      for ( int i = 0; i < arguments.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", arguments[i] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFolders.UnableToSaveToRepo", String
      .valueOf( id_job ) ), dbe );
  }
}
 
Example 4
Source File: JobEntryWaitForSQL.java    From pentaho-kettle with Apache License 2.0
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );

    rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "success_condition", getSuccessConditionCode( successCondition ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "rows_count_value", rowsCountValue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "custom_sql", customSQL );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "is_custom_sql", iscustomSQL );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "is_usevars", isUseVars );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_rows_result", isAddRowsResult );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "maximum_timeout", maximumTimeout );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "check_cycle_time", checkCycleTime );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "success_on_timeout", successOnTimeout );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "clear_result_rows", isClearResultList );

  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "JobEntryWaitForSQL.UnableSaveRep", "" + id_job ), dbe );
  }
}
 
Example 5
Source File: JobEntryMysqlBulkFile.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "filename", filename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "separator", separator );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "enclosed", enclosed );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "lineterminated", lineterminated );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "limitlines", limitlines );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "listcolumn", listcolumn );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "highpriority", highpriority );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "optionenclosed", optionenclosed );

    rep.saveJobEntryAttribute( id_job, getObjectId(), "outdumpvalue", outdumpvalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "iffileexists", iffileexists );

    rep.saveJobEntryAttribute( id_job, getObjectId(), "addfiletoresult", addfiletoresult );

    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'Mysql Bulk Load' to the repository for id_job=" + id_job, dbe );
  }
}
 
Example 6
Source File: JobEntryMsgBoxInfo.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "bodymessage", bodymessage );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "titremessage", titremessage );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to save job entry of type 'Msgbox Info' to the repository for id_job="
      + id_job, dbe );
  }
}
 
Example 7
Source File: JobEntryWriteToLog.java    From pentaho-kettle with Apache License 2.0
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "logmessage", logmessage );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "loglevel", ( entryLogLevel != null ? entryLogLevel
      .getCode() : "" ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "logsubject", logsubject );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "WriteToLog.Error.UnableToSaveToRepository.Label" )
      + id_job, dbe );
  }
}
 
Example 8
Source File: HL7MLLPAcknowledge.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "server", server );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "port", port );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "variable", variableName );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to save job entry of type 'ftp' to the repository for id_job=" + id_job, dbe );
  }
}
 
Example 9
Source File: JobEntryFileCompare.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "filename1", filename1 );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "filename2", filename2 );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_filename_result", addFilenameToResult );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntryFileCompare.ERROR_0003_Unable_To_Save_Job", id_job ), dbe );
  }
}
 
Example 10
Source File: JobEntryTelnet.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "hostname", hostname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "port", port );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "timeout", timeout );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to save job entry of type 'Telnet' to the repository for id_job="
      + id_job, dbe );
  }
}
 
Example 11
Source File: JobEntryConnectedToRepository.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "isspecificrep", isspecificrep );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "repname", repname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "isspecificuser", isspecificuser );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "username", username );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntryConnectedToRepository.Meta.UnableToSaveToRep" )
      + id_job, dbe );
  }
}
 
Example 12
Source File: JobEntryDeleteResultFilenames.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "foldername", foldername );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "specify_wildcard", specifywildcard );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcard", wildcard );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcardexclude", wildcardexclude );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntryDeleteResultFilenames.CanNotSaveToRep", "" + id_job, dbe.getMessage() ), dbe );
  }
}
 
Example 13
Source File: JobEntryEvalFilesMetrics.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {

    // save the arguments...
    if ( sourceFileFolder != null ) {
      for ( int i = 0; i < sourceFileFolder.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "source_filefolder", sourceFileFolder[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "wildcard", sourceWildcard[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "include_subFolders", sourceIncludeSubfolders[i] );
      }
    }

    rep.saveJobEntryAttribute( id_job, getObjectId(), "result_filenames_wildcard", resultFilenamesWildcard );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "result_field_file", ResultFieldFile );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "result_field_wild", ResultFieldWildcard );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "result_field_includesubfolders", ResultFieldIncludesubFolders );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "comparevalue", comparevalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "minvalue", minvalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "maxvalue", maxvalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "successnumbercondition", JobEntrySimpleEval
      .getSuccessNumberConditionCode( successConditionType ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "scale", getScaleCode( scale ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "source_files", getSourceFilesCode( sourceFiles ) );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "evaluation_type", getEvaluationTypeCode( evaluationType ) );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.Exception.UnableSaveRep" )
      + id_job, dbe );
  }
}
 
Example 14
Source File: JobEntrySFTPPUT.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "servername", serverName );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "serverport", serverPort );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "username", userName );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "password", Encr
      .encryptPasswordIfNotUsingVariables( password ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "sftpdirectory", sftpDirectory );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "localdirectory", localDirectory );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcard", wildcard );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "copyprevious", copyprevious );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "copypreviousfiles", copypreviousfiles );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "addFilenameResut", addFilenameResut );

    rep.saveJobEntryAttribute( id_job, getObjectId(), "usekeyfilename", usekeyfilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilename", keyfilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilepass", Encr
      .encryptPasswordIfNotUsingVariables( keyfilepass ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "compression", compression );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyType", proxyType );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyHost", proxyHost );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyPort", proxyPort );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyUsername", proxyUsername );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyPassword", Encr
      .encryptPasswordIfNotUsingVariables( proxyPassword ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "aftersftpput", getAfterSFTPPutCode( getAfterFTPS() ) );

    rep.saveJobEntryAttribute( id_job, getObjectId(), "createRemoteFolder", createRemoteFolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "destinationfolder", destinationfolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "createdestinationfolder", createDestinationFolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "successWhenNoFile", successWhenNoFile );

  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'SFTPPUT' to the repository for id_job="
      + id_job, dbe );
  }
}
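Note that the passwords above are run through Encr.encryptPasswordIfNotUsingVariables(...) before being stored as plain string attributes. A one-line sketch of the usual load-side counterpart, assuming the matching decrypt helper in org.pentaho.di.core.encryption.Encr:

// Hedged sketch: decrypt on load with the assumed counterpart of the encrypt helper used above.
password = Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "password" ) );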
 
Example 15
Source File: JobEntryAbort.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "message", messageAbort );

  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryAbort.UnableToSaveToRepo.Label", String
      .valueOf( id_job ) ), dbe );
  }
}
 
Example 16
Source File: JobEntryMssqlBulkLoad.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "filename", filename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "datafiletype", datafiletype );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldterminator", fieldterminator );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "lineterminated", lineterminated );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "codepage", codepage );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "specificcodepage", specificcodepage );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "formatfilename", formatfilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "firetriggers", firetriggers );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "checkconstraints", checkconstraints );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "keepnulls", keepnulls );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "keepidentity", keepidentity );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "tablock", tablock );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "startfile", startfile );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "endfile", endfile );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "orderby", orderby );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "orderdirection", orderdirection );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "errorfilename", errorfilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "maxerrors", maxerrors );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "batchsize", batchsize );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "rowsperbatch", rowsperbatch );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "adddatetime", adddatetime );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "addfiletoresult", addfiletoresult );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "truncate", truncate );

    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'MSsql Bulk Load' to the repository for id_job=" + id_job, dbe );
  }
}
 
Example 17
Source File: JobEntryDTDValidator.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "xmlfilename", xmlfilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "dtdfilename", dtdfilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "dtdintern", dtdintern );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to save job entry of type 'DTDvalidator' to the repository for id_job="
        + id_job, dbe );
  }
}
 
Example 18
Source File: CheckConnections.java    From knowbi-pentaho-pdi-neo4j-output with Apache License 2.0
@Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId jobId ) throws KettleException {
  for ( int i = 0; i < connectionNames.size(); i++ ) {
    rep.saveJobEntryAttribute( jobId, getObjectId(), i, "connection", connectionNames.get( i ) );
  }
}
 
Example 19
Source File: JobEntryCopyFiles.java    From pentaho-kettle with Apache License 2.0
protected void saveSourceRep( Repository rep, ObjectId id_job, ObjectId id_jobentry, int i, String value )
  throws KettleException {
  String namedCluster = configurationMappings.get( value );
  rep.saveJobEntryAttribute( id_job, getObjectId(), i, SOURCE_FILE_FOLDER, value );
  rep.saveJobEntryAttribute( id_job, id_jobentry, i, SOURCE_CONFIGURATION_NAME, namedCluster );
}
 
Example 20
Source File: JobEntryMoveFiles.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "move_empty_folders", move_empty_folders );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", arg_from_previous );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "include_subfolders", include_subfolders );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_result_filesname", add_result_filesname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "destination_is_a_file", destination_is_a_file );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "create_destination_folder", create_destination_folder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "nr_errors_less_than", nr_errors_less_than );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "success_condition", success_condition );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_date", add_date );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_time", add_time );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "SpecifyFormat", SpecifyFormat );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "date_time_format", date_time_format );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "AddDateBeforeExtension", AddDateBeforeExtension );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "DoNotKeepFolderStructure", DoNotKeepFolderStructure );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "iffileexists", iffileexists );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "destinationFolder", destinationFolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "ifmovedfileexists", ifmovedfileexists );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "moved_date_time_format", moved_date_time_format );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_moved_date", add_moved_date );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_moved_time", add_moved_time );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "SpecifyMoveFormat", SpecifyMoveFormat );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "create_move_to_folder", create_move_to_folder );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "AddMovedDateBeforeExtension", AddMovedDateBeforeExtension );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "simulate", simulate );

    // save the arguments...
    if ( source_filefolder != null ) {
      for ( int i = 0; i < source_filefolder.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "source_filefolder", source_filefolder[i] );
        rep.saveJobEntryAttribute(
          id_job, getObjectId(), i, "destination_filefolder", destination_filefolder[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "wildcard", wildcard[i] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.UnableSaveRep" )
      + id_job, dbe );
  }
}