org.pentaho.di.core.exception.KettleXMLException Java Examples
The following examples show how to use org.pentaho.di.core.exception.KettleXMLException. They are taken from open-source projects such as pentaho-kettle, pentaho-hadoop-shims, knowbi-pentaho-pdi-neo4j-output, and pentaho-metadata; the source file and license are listed above each example.
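Nearly all of the examples share one pattern: step or job-entry metadata is read from a DOM Node with org.pentaho.di.core.xml.XMLHandler, and any parsing failure is wrapped in a KettleXMLException so callers receive a single, typed XML error. The following minimal sketch shows that pattern on its own; ExampleStepMeta and its field tag names are hypothetical placeholders, not part of the examples below.

import java.util.ArrayList;
import java.util.List;

import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.xml.XMLHandler;
import org.w3c.dom.Node;

public class ExampleStepMeta {

  private final List<String> fieldNames = new ArrayList<>();

  // Hypothetical readData(): reads a <fields><field><name>...</name></field></fields>
  // block and wraps any parsing problem in a KettleXMLException, mirroring the
  // readData()/loadXML() methods in the examples that follow.
  public void readData( Node stepnode ) throws KettleXMLException {
    try {
      Node fields = XMLHandler.getSubNode( stepnode, "fields" );
      int nrFields = XMLHandler.countNodes( fields, "field" );
      for ( int i = 0; i < nrFields; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
        fieldNames.add( XMLHandler.getTagValue( fnode, "name" ) );
      }
    } catch ( Exception e ) {
      throw new KettleXMLException( "Unable to load step info from XML", e );
    }
  }
}

In the real step and job-entry classes this kind of readData() helper is typically private and called from the class's loadXML() implementation, as several of the examples below show.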
Example #1
Source File: RandomCCNumberGeneratorMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int count = XMLHandler.countNodes( fields, "field" );

    allocate( count );

    for ( int i = 0; i < count; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      fieldCCType[i] = XMLHandler.getTagValue( fnode, "cctype" );
      fieldCCLength[i] = XMLHandler.getTagValue( fnode, "cclen" );
      fieldCCSize[i] = XMLHandler.getTagValue( fnode, "ccsize" );
    }

    cardNumberFieldName = XMLHandler.getTagValue( stepnode, "cardNumberFieldName" );
    cardLengthFieldName = XMLHandler.getTagValue( stepnode, "cardLengthFieldName" );
    cardTypeFieldName = XMLHandler.getTagValue( stepnode, "cardTypeFieldName" );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to read step information from XML", e );
  }
}
Example #2
Source File: CheckSumMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    checksumtype = XMLHandler.getTagValue( stepnode, "checksumtype" );
    resultfieldName = XMLHandler.getTagValue( stepnode, "resultfieldName" );
    resultType = getResultTypeByCode( Const.NVL( XMLHandler.getTagValue( stepnode, "resultType" ), "" ) );
    compatibilityMode = parseCompatibilityMode( XMLHandler.getTagValue( stepnode, "compatibilityMode" ) );
    oldChecksumBehaviour = parseOldChecksumBehaviour( XMLHandler.getTagValue( stepnode, "oldChecksumBehaviour" ) );
    setFieldSeparatorString( XMLHandler.getTagValue( stepnode, "fieldSeparatorString" ) );

    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrfields = XMLHandler.countNodes( fields, "field" );

    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      fieldName[i] = XMLHandler.getTagValue( fnode, "name" );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example #3
Source File: HBaseValueMetaInterfaceImplTest.java From pentaho-hadoop-shims with Apache License 2.0
private void verifyXml( HBaseValueMetaInterfaceImpl toVerify, StringBuilder result, boolean isIndexedStorageType )
  throws KettleXMLException, ParserConfigurationException {
  assertNotEquals( 0, result.length() );
  Document document = loadDocumentFromString( result );
  Node node = XMLHandler.getSubNode( document, XML_TAG_FIELD );
  assertEquals( hBaseValueMetaInterface.getTableName(), XMLHandler.getTagValue( node, "table_name" ) );
  assertEquals( hBaseValueMetaInterface.getMappingName(), XMLHandler.getTagValue( node, "mapping_name" ) );
  assertEquals( hBaseValueMetaInterface.getAlias(), XMLHandler.getTagValue( node, "alias" ) );
  assertEquals( hBaseValueMetaInterface.getColumnFamily(), XMLHandler.getTagValue( node, "family" ) );
  assertEquals( hBaseValueMetaInterface.getColumnName(), XMLHandler.getTagValue( node, "column" ) );
  assertEquals( hBaseValueMetaInterface.isKey() ? "Y" : "N", XMLHandler.getTagValue( node, "key" ) );
  assertEquals( ValueMeta.getTypeDesc( hBaseValueMetaInterface.getType() ), XMLHandler.getTagValue( node, "type" ) );
  assertEquals( hBaseValueMetaInterface.getConversionMask(), XMLHandler.getTagValue( node, "format" ) );
  assertEquals( isIndexedStorageType ? EXPECTED_INDEXES_ROW : null, XMLHandler.getTagValue( node, "index_values" ) );
}
Example #4
Source File: SwitchCaseMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    fieldname = XMLHandler.getTagValue( stepnode, "fieldname" );
    isContains = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "use_contains" ) );
    caseValueType = ValueMetaBase.getType( XMLHandler.getTagValue( stepnode, "case_value_type" ) );
    caseValueFormat = XMLHandler.getTagValue( stepnode, "case_value_format" );
    caseValueDecimal = XMLHandler.getTagValue( stepnode, "case_value_decimal" );
    caseValueGroup = XMLHandler.getTagValue( stepnode, "case_value_group" );
    defaultTargetStepname = XMLHandler.getTagValue( stepnode, "default_target_step" );

    Node casesNode = XMLHandler.getSubNode( stepnode, XML_TAG_CASE_VALUES );
    int nrCases = XMLHandler.countNodes( casesNode, XML_TAG_CASE_VALUE );
    allocate();
    for ( int i = 0; i < nrCases; i++ ) {
      Node caseNode = XMLHandler.getSubNodeByNr( casesNode, XML_TAG_CASE_VALUE, i );
      SwitchCaseTarget target = new SwitchCaseTarget();
      target.caseValue = XMLHandler.getTagValue( caseNode, "value" );
      target.caseTargetStepname = XMLHandler.getTagValue( caseNode, "target_step" );
      caseTargets.add( target );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "SwitchCaseMeta.Exception.UnableToLoadStepInfoFromXML" ), e );
  }
}
Example #5
Source File: SortedMergeMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrfields = XMLHandler.countNodes( fields, "field" );

    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      fieldName[i] = XMLHandler.getTagValue( fnode, "name" );
      String asc = XMLHandler.getTagValue( fnode, "ascending" );
      if ( asc.equalsIgnoreCase( "Y" ) ) {
        ascending[i] = true;
      } else {
        ascending[i] = false;
      }
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example #6
Source File: InfobrightLoaderMeta.java From pentaho-kettle with Apache License 2.0
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  super.loadXML( stepnode, databases, metaStore );
  try {
    dataFormat = Enum.valueOf( DataFormat.class, XMLHandler.getTagValue( stepnode, TAG_DATA_FORMAT ) );
    agentPort = Integer.parseInt( Const.NVL( XMLHandler.getTagValue( stepnode, TAG_AGENT_PORT ),
      Integer.toString( InfobrightNamedPipeLoader.AGENT_DEFAULT_PORT ) ) );
    String charsetName = XMLHandler.getTagValue( stepnode, TAG_CHARSET );
    charset = ( charsetName == null ? InfobrightNamedPipeLoader.DEFAULT_CHARSET : Charset.forName( charsetName ) );
    debugFile = XMLHandler.getTagValue( stepnode, TAG_DEBUG_FILE );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example #7
Source File: PentahoReportingOutputMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    inputFileField = XMLHandler.getTagValue( stepnode, "input_file_field" );
    outputFileField = XMLHandler.getTagValue( stepnode, "output_file_field" );
    createParentfolder = "Y".equals( XMLHandler.getTagValue( stepnode, "create_parent_folder" ) );

    parameterFieldMap = new HashMap<String, String>();
    Node parsNode = XMLHandler.getSubNode( stepnode, XML_TAG_PARAMETERS );
    List<Node> nodes = XMLHandler.getNodes( parsNode, XML_TAG_PARAMETER );
    for ( Node node : nodes ) {
      String parameter = XMLHandler.getTagValue( node, "name" );
      String fieldname = XMLHandler.getTagValue( node, "field" );
      if ( !Utils.isEmpty( parameter ) && !Utils.isEmpty( fieldname ) ) {
        parameterFieldMap.put( parameter, fieldname );
      }
    }

    outputProcessorType = ProcessorType.getProcessorTypeByCode( XMLHandler.getTagValue( stepnode, "processor_type" ) );
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "PentahoReportingOutputMeta.Exception.UnableToLoadStepInfo" ), e );
  }
}
Example #8
Source File: JobEntryFilesExist.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    filename = XMLHandler.getTagValue( entrynode, "filename" );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );

    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      arguments[i] = XMLHandler.getTagValue( fnode, "name" );
    }
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "JobEntryFilesExist.ERROR_0001_Cannot_Load_Job_Entry_From_Xml_Node", xe.getMessage() ) );
  }
}
Example #9
Source File: RandomValueMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int count = XMLHandler.countNodes( fields, "field" );
    String type;

    allocate( count );

    for ( int i = 0; i < count; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      fieldName[i] = XMLHandler.getTagValue( fnode, "name" );
      type = XMLHandler.getTagValue( fnode, "type" );
      fieldType[i] = getType( type );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to read step information from XML", e );
  }
}
Example #10
Source File: JobEntryExportRepository.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    repositoryname = XMLHandler.getTagValue( entrynode, "repositoryname" );
    username = XMLHandler.getTagValue( entrynode, "username" );
    password = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "password" ) );
    targetfilename = XMLHandler.getTagValue( entrynode, "targetfilename" );
    iffileexists = XMLHandler.getTagValue( entrynode, "iffileexists" );
    export_type = XMLHandler.getTagValue( entrynode, "export_type" );
    directoryPath = XMLHandler.getTagValue( entrynode, "directoryPath" );
    add_date = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_date" ) );
    add_time = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_time" ) );
    SpecifyFormat = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "SpecifyFormat" ) );
    date_time_format = XMLHandler.getTagValue( entrynode, "date_time_format" );
    createfolder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createfolder" ) );
    newfolder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "newfolder" ) );
    add_result_filesname = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_result_filesname" ) );
    nr_errors_less_than = XMLHandler.getTagValue( entrynode, "nr_errors_less_than" );
    success_condition = XMLHandler.getTagValue( entrynode, "success_condition" );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobExportRepository.Meta.UnableLoadXML" ), xe );
  }
}
Example #11
Source File: StringCutMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    int nrkeys;

    Node lookup = XMLHandler.getSubNode( stepnode, "fields" );
    nrkeys = XMLHandler.countNodes( lookup, "field" );

    allocate( nrkeys );

    for ( int i = 0; i < nrkeys; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( lookup, "field", i );
      fieldInStream[i] = Const.NVL( XMLHandler.getTagValue( fnode, "in_stream_name" ), "" );
      fieldOutStream[i] = Const.NVL( XMLHandler.getTagValue( fnode, "out_stream_name" ), "" );
      cutFrom[i] = Const.NVL( XMLHandler.getTagValue( fnode, "cut_from" ), "" );
      cutTo[i] = Const.NVL( XMLHandler.getTagValue( fnode, "cut_to" ), "" );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "StringCutMeta.Exception.UnableToReadStepInfoFromXML" ), e );
  }
}
Example #12
Source File: WebResult.java From pentaho-kettle with Apache License 2.0
public static WebResult fromXMLString( String xml ) throws KettleXMLException {
  try {
    Document doc = XMLHandler.loadXMLString( xml );
    Node node = XMLHandler.getSubNode( doc, XML_TAG );

    return new WebResult( node );
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "WebResult.Error.UnableCreateResult" ), e );
  }
}
Example #13
Source File: DragAndDropContainer.java From pentaho-kettle with Apache License 2.0
/**
 * Construct a Drag and drop container from an XML String
 *
 * @param xml
 *          The XML string to convert from
 */
public DragAndDropContainer( String xml ) throws KettleXMLException {
  try {
    Document doc = XMLHandler.loadXMLString( xml );
    Node dnd = XMLHandler.getSubNode( doc, XML_TAG );

    id = XMLHandler.getTagValue( dnd, "ID" );
    type = getType( XMLHandler.getTagValue( dnd, "DragType" ) );
    data = new String( Base64.decodeBase64( XMLHandler.getTagValue( dnd, "Data" ).getBytes() ), Const.XML_ENCODING );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unexpected error parsing Drag & Drop XML fragment: " + xml, e );
  }
}
Example #14
Source File: GenerateCsvMeta.java From knowbi-pentaho-pdi-neo4j-output with Apache License 2.0
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  graphFieldName = XMLHandler.getTagValue( stepnode, GRAPH_FIELD_NAME );
  baseFolder = XMLHandler.getTagValue( stepnode, BASE_FOLDER );
  uniquenessStrategy = UniquenessStrategy.getStrategyFromName( XMLHandler.getTagValue( stepnode, UNIQUENESS_STRATEGY ) );
  filesPrefix = XMLHandler.getTagValue( stepnode, FILES_PREFIX );
  filenameField = XMLHandler.getTagValue( stepnode, FILENAME_FIELD );
  fileTypeField = XMLHandler.getTagValue( stepnode, FILE_TYPE_FIELD );
}
Example #15
Source File: RssInputMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    urlInField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "url_in_field" ) );
    urlFieldname = XMLHandler.getTagValue( stepnode, "url_field_name" );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    includeUrl = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include_url" ) );
    urlField = XMLHandler.getTagValue( stepnode, "url_Field" );
    readfrom = XMLHandler.getTagValue( stepnode, "read_from" );

    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrFields = XMLHandler.countNodes( fields, "field" );
    Node urlnode = XMLHandler.getSubNode( stepnode, "urls" );
    int nrUrls = XMLHandler.countNodes( urlnode, "url" );
    allocate( nrUrls, nrFields );

    for ( int i = 0; i < nrUrls; i++ ) {
      Node urlnamenode = XMLHandler.getSubNodeByNr( urlnode, "url", i );
      url[i] = XMLHandler.getNodeValue( urlnamenode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      RssInputField field = new RssInputField( fnode );
      inputFields[i] = field;
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example #16
Source File: JaninoMeta.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  int nrCalcs = XMLHandler.countNodes( stepnode, JaninoMetaFunction.XML_TAG );
  allocate( nrCalcs );
  for ( int i = 0; i < nrCalcs; i++ ) {
    Node calcnode = XMLHandler.getSubNodeByNr( stepnode, JaninoMetaFunction.XML_TAG, i );
    formula[i] = new JaninoMetaFunction( calcnode );
  }
}
Example #17
Source File: JobEntryMSAccessBulkLoad.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    include_subfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "include_subfolders" ) );
    add_result_filenames = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_result_filenames" ) );
    is_args_from_previous = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "is_args_from_previous" ) );
    limit = XMLHandler.getTagValue( entrynode, "limit" );
    success_condition = XMLHandler.getTagValue( entrynode, "success_condition" );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    source_filefolder = new String[nrFields];
    delimiter = new String[nrFields];
    source_wildcard = new String[nrFields];
    target_Db = new String[nrFields];
    target_table = new String[nrFields];

    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      source_filefolder[i] = XMLHandler.getTagValue( fnode, "source_filefolder" );
      source_wildcard[i] = XMLHandler.getTagValue( fnode, "source_wildcard" );
      delimiter[i] = XMLHandler.getTagValue( fnode, "delimiter" );
      target_Db[i] = XMLHandler.getTagValue( fnode, "target_db" );
      target_table[i] = XMLHandler.getTagValue( fnode, "target_table" );
    }
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "JobEntryMSAccessBulkLoad.Meta.UnableLoadXML", xe.getMessage() ), xe );
  }
}
Example #18
Source File: JobEntryEvalFilesMetrics.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );

    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      sourceFileFolder[i] = XMLHandler.getTagValue( fnode, "source_filefolder" );
      sourceWildcard[i] = XMLHandler.getTagValue( fnode, "wildcard" );
      sourceIncludeSubfolders[i] = XMLHandler.getTagValue( fnode, "include_subFolders" );
    }

    resultFilenamesWildcard = XMLHandler.getTagValue( entrynode, "result_filenames_wildcard" );
    ResultFieldFile = XMLHandler.getTagValue( entrynode, "result_field_file" );
    ResultFieldWildcard = XMLHandler.getTagValue( entrynode, "result_field_wildcard" );
    ResultFieldIncludesubFolders = XMLHandler.getTagValue( entrynode, "result_field_includesubfolders" );
    comparevalue = XMLHandler.getTagValue( entrynode, "comparevalue" );
    minvalue = XMLHandler.getTagValue( entrynode, "minvalue" );
    maxvalue = XMLHandler.getTagValue( entrynode, "maxvalue" );
    successConditionType = JobEntrySimpleEval.getSuccessNumberConditionByCode(
      Const.NVL( XMLHandler.getTagValue( entrynode, "successnumbercondition" ), "" ) );
    sourceFiles = getSourceFilesByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "source_files" ), "" ) );
    evaluationType = getEvaluationTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "evaluation_type" ), "" ) );
    scale = getScaleByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "scale" ), "" ) );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "JobEvalFilesMetrics.Error.Exception.UnableLoadXML" ), xe );
  }
}
Example #19
Source File: PrioritizeStreamsMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode, List<? extends SharedObjectInterface> databases ) throws KettleXMLException {
  try {
    Node steps = XMLHandler.getSubNode( stepnode, "steps" );
    int nrsteps = XMLHandler.countNodes( steps, "step" );

    allocate( nrsteps );

    for ( int i = 0; i < nrsteps; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( steps, "step", i );
      stepName[i] = XMLHandler.getTagValue( fnode, "name" );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example #20
Source File: S3CsvInputMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    awsAccessKey = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( stepnode, "aws_access_key" ) );
    awsSecretKey = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( stepnode, "aws_secret_key" ) );
    bucket = XMLHandler.getTagValue( stepnode, "bucket" );
    filename = XMLHandler.getTagValue( stepnode, "filename" );
    filenameField = XMLHandler.getTagValue( stepnode, "filename_field" );
    rowNumField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    includingFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include_filename" ) );
    delimiter = XMLHandler.getTagValue( stepnode, "separator" );
    enclosure = XMLHandler.getTagValue( stepnode, "enclosure" );
    maxLineSize = XMLHandler.getTagValue( stepnode, "max_line_size" );
    headerPresent = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "header" ) );
    lazyConversionActive = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "lazy_conversion" ) );
    runningInParallel = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "parallel" ) );

    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrfields = XMLHandler.countNodes( fields, "field" );

    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      inputFields[i] = new TextFileInputField();

      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );

      inputFields[i].setName( XMLHandler.getTagValue( fnode, "name" ) );
      inputFields[i].setType( ValueMetaFactory.getIdForValueMeta( XMLHandler.getTagValue( fnode, "type" ) ) );
      inputFields[i].setFormat( XMLHandler.getTagValue( fnode, "format" ) );
      inputFields[i].setCurrencySymbol( XMLHandler.getTagValue( fnode, "currency" ) );
      inputFields[i].setDecimalSymbol( XMLHandler.getTagValue( fnode, "decimal" ) );
      inputFields[i].setGroupSymbol( XMLHandler.getTagValue( fnode, "group" ) );
      inputFields[i].setLength( Const.toInt( XMLHandler.getTagValue( fnode, "length" ), -1 ) );
      inputFields[i].setPrecision( Const.toInt( XMLHandler.getTagValue( fnode, "precision" ), -1 ) );
      inputFields[i].setTrimType( ValueMetaString.getTrimTypeByCode( XMLHandler.getTagValue( fnode, "trim_type" ) ) );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example #21
Source File: JobEntrySQL.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    sql = XMLHandler.getTagValue( entrynode, "sql" );
    String dbname = XMLHandler.getTagValue( entrynode, CONNECTION_TAG );
    String sSubs = XMLHandler.getTagValue( entrynode, USE_VARIABLE_SUBSTITUTION_TAG );

    if ( sSubs != null && sSubs.equalsIgnoreCase( "T" ) ) {
      useVariableSubstitution = true;
    }
    databaseMeta = DatabaseMeta.findDatabase( databases, dbname );

    String ssql = XMLHandler.getTagValue( entrynode, SQLFROMFILE_TAG );
    if ( ssql != null && ssql.equalsIgnoreCase( "T" ) ) {
      sqlFromFile = true;
    }

    sqlFilename = XMLHandler.getTagValue( entrynode, SQLFILENAME_TAG );

    String sOneStatement = XMLHandler.getTagValue( entrynode, SEND_ONE_STATEMENT_TAG );
    if ( sOneStatement != null && sOneStatement.equalsIgnoreCase( "T" ) ) {
      sendOneStatement = true;
    }
  } catch ( KettleException e ) {
    throw new KettleXMLException( "Unable to load job entry of type 'sql' from XML node", e );
  }
}
Example #22
Source File: SlaveServerConfigTest.java From pentaho-kettle with Apache License 2.0
@Test
public void testParseJettyOption_NoOptionsNode() throws KettleXMLException {
  Node configNode = getConfigNode( getConfigWithNoOptionsNode() );
  Map<String, String> parseJettyOptions = slServerConfig.parseJettyOptions( configNode );
  assertNull( parseJettyOptions );
}
Example #23
Source File: HBaseValueMetaInterfaceFactoryImpl.java From pentaho-hadoop-shims with Apache License 2.0
@Override
public List<HBaseValueMetaInterface> createListFromNode( Node stepnode ) throws KettleXMLException {
  Node fields = XMLHandler.getSubNode( stepnode, "output_fields" );
  int nrfields = XMLHandler.countNodes( fields, "field" );

  List<HBaseValueMetaInterface> m_outputFields = new ArrayList<>( nrfields );
  for ( int i = 0; i < nrfields; i++ ) {
    m_outputFields.add( createFromNode( XMLHandler.getSubNodeByNr( fields, "field", i ) ) );
  }
  return m_outputFields;
}
Example #24
Source File: SlaveServerConfigTest.java From pentaho-kettle with Apache License 2.0
@Test
public void testDoNotSetUpJettyOptionsAsSystemParameters_WhenEmptyOptionsNode() throws KettleXMLException {
  Node configNode = getConfigNode( getConfigWithEmptyOptionsNode() );
  slServerConfig.setUpJettyOptions( configNode );
  assertFalse( "There should not be any jetty option but it is here: " + EXPECTED_ACCEPTORS_KEY,
    System.getProperties().containsKey( EXPECTED_ACCEPTORS_KEY ) );
  assertFalse( "There should not be any jetty option but it is here: " + EXPECTED_ACCEPT_QUEUE_SIZE_KEY,
    System.getProperties().containsKey( EXPECTED_ACCEPT_QUEUE_SIZE_KEY ) );
  assertFalse( "There should not be any jetty option but it is here: " + EXPECTED_LOW_RES_MAX_IDLE_TIME_KEY,
    System.getProperties().containsKey( EXPECTED_LOW_RES_MAX_IDLE_TIME_KEY ) );
}
Example #25
Source File: ExecProcessMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode, List<? extends SharedObjectInterface> databases ) throws KettleXMLException {
  try {
    processfield = XMLHandler.getTagValue( stepnode, "processfield" );
    resultfieldname = XMLHandler.getTagValue( stepnode, "resultfieldname" );
    errorfieldname = XMLHandler.getTagValue( stepnode, "errorfieldname" );
    exitvaluefieldname = XMLHandler.getTagValue( stepnode, "exitvaluefieldname" );
    failwhennotsuccess = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "failwhennotsuccess" ) );
    outputLineDelimiter = XMLHandler.getTagValue( stepnode, "outputlinedelimiter" );
    if ( outputLineDelimiter == null ) {
      outputLineDelimiter = ""; // default to empty string for backward compatibility
    }

    argumentsInFields = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "argumentsInFields" ) );
    Node argumentFieldsNode = XMLHandler.getSubNode( stepnode, "argumentFields" );
    if ( argumentFieldsNode == null ) {
      argumentFieldNames = new String[0];
    } else {
      int argumentFieldCount = XMLHandler.countNodes( argumentFieldsNode, "argumentField" );
      argumentFieldNames = new String[argumentFieldCount];
      for ( int i = 0; i < argumentFieldCount; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( argumentFieldsNode, "argumentField", i );
        argumentFieldNames[i] = XMLHandler.getTagValue( fnode, "argumentFieldName" );
      }
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "ExecProcessMeta.Exception.UnableToReadStepInfo" ), e );
  }
}
Example #26
Source File: JobEntrySSH2PUT.java From pentaho-kettle with Apache License 2.0
@Override
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    serverName = XMLHandler.getTagValue( entrynode, "servername" );
    userName = XMLHandler.getTagValue( entrynode, "username" );
    password = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "password" ) );
    serverPort = XMLHandler.getTagValue( entrynode, "serverport" );
    ftpDirectory = XMLHandler.getTagValue( entrynode, "ftpdirectory" );
    localDirectory = XMLHandler.getTagValue( entrynode, "localdirectory" );
    wildcard = XMLHandler.getTagValue( entrynode, "wildcard" );
    onlyGettingNewFiles = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "only_new" ) );
    usehttpproxy = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "usehttpproxy" ) );
    httpproxyhost = XMLHandler.getTagValue( entrynode, "httpproxyhost" );
    httpproxyport = XMLHandler.getTagValue( entrynode, "httpproxyport" );
    httpproxyusername = XMLHandler.getTagValue( entrynode, "httpproxyusername" );
    httpProxyPassword = XMLHandler.getTagValue( entrynode, "httpproxypassword" );
    publicpublickey = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "publicpublickey" ) );
    keyFilename = XMLHandler.getTagValue( entrynode, "keyfilename" );
    keyFilePass = XMLHandler.getTagValue( entrynode, "keyfilepass" );
    useBasicAuthentication = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "usebasicauthentication" ) );
    createRemoteFolder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createremotefolder" ) );
    afterFtpPut = XMLHandler.getTagValue( entrynode, "afterftpput" );
    destinationfolder = XMLHandler.getTagValue( entrynode, "destinationfolder" );
    createDestinationFolder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createdestinationfolder" ) );
    cachehostkey = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "cachehostkey" ) );
    timeout = Const.toInt( XMLHandler.getTagValue( entrynode, "timeout" ), 0 );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobSSH2PUT.Log.UnableLoadXML", xe.getMessage() ) );
  }
}
Example #27
Source File: OlapUtil.java From pentaho-metadata with GNU Lesser General Public License v2.1
/**
 * Create an OlapRole object from an XML node
 *
 * @param node
 * @return
 * @throws KettleXMLException
 */
public static OlapRole olapRoleFromNode( Node node ) throws KettleXMLException {
  String name = XMLHandler.getTagValue( node, "name" );
  StringBuilder xml = new StringBuilder();
  NodeList children = XMLHandler.getSubNode( node, "definition" ).getChildNodes();
  for ( int i = 0; i < children.getLength(); i++ ) {
    xml.append( ( XMLHandler.formatNode( children.item( i ) ) ) );
  }
  String definition = xml.toString();
  return new OlapRole( name, definition );
}
Example #28
Source File: ExecSQLMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode, List<? extends SharedObjectInterface> databases ) throws KettleXMLException {
  try {
    String con = XMLHandler.getTagValue( stepnode, "connection" );
    databaseMeta = DatabaseMeta.findDatabase( databases, con );
    String eachRow = XMLHandler.getTagValue( stepnode, "execute_each_row" );
    executedEachInputRow = "Y".equalsIgnoreCase( eachRow );
    singleStatement = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "single_statement" ) );
    replaceVariables = "Y".equals( XMLHandler.getTagValue( stepnode, "replace_variables" ) );
    quoteString = "Y".equals( XMLHandler.getTagValue( stepnode, "quoteString" ) );
    setParams = "Y".equals( XMLHandler.getTagValue( stepnode, "set_params" ) );
    sql = XMLHandler.getTagValue( stepnode, "sql" );
    insertField = XMLHandler.getTagValue( stepnode, "insert_field" );
    updateField = XMLHandler.getTagValue( stepnode, "update_field" );
    deleteField = XMLHandler.getTagValue( stepnode, "delete_field" );
    readField = XMLHandler.getTagValue( stepnode, "read_field" );

    Node argsnode = XMLHandler.getSubNode( stepnode, "arguments" );
    int nrArguments = XMLHandler.countNodes( argsnode, "argument" );
    allocate( nrArguments );
    for ( int i = 0; i < nrArguments; i++ ) {
      Node argnode = XMLHandler.getSubNodeByNr( argsnode, "argument", i );
      arguments[i] = XMLHandler.getTagValue( argnode, "name" );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "ExecSQLMeta.Exception.UnableToLoadStepInfoFromXML" ), e );
  }
}
Example #29
Source File: JobEntryCopyFiles.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    copy_empty_folders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "copy_empty_folders" ) );
    arg_from_previous = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );
    overwrite_files = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "overwrite_files" ) );
    include_subfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "include_subfolders" ) );
    remove_source_files = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "remove_source_files" ) );
    add_result_filesname = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_result_filesname" ) );
    destination_is_a_file = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "destination_is_a_file" ) );
    create_destination_folder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "create_destination_folder" ) );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );

    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      source_filefolder[i] = loadSource( fnode );
      destination_filefolder[i] = loadDestination( fnode );
      wildcard[i] = XMLHandler.getTagValue( fnode, "wildcard" );
    }
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.UnableLoadXML" ), xe );
  }
}
Example #30
Source File: JobTrackerExecutionIT.java From pentaho-kettle with Apache License 2.0
@Test
public void testJobTracker() throws UnknownParamException, KettleXMLException, URISyntaxException, IOException {
  if ( res.setAsVariable != null ) {
    System.getProperties().setProperty( DatabaseLogExceptionFactory.KETTLE_GLOBAL_PROP_NAME,
      res.setAsVariable.toString() );
  }

  try {
    Job job = new Job( null, getJobMeta( res.fileName ) );
    job.setLogLevel( LogLevel.BASIC );
    job.start();
    job.waitUntilFinished();

    // this simulates - Spoon 'Job Metrics' tab attempt to refresh:
    JobTracker tracker = job.getJobTracker();
    List<JobTracker> trackers = tracker.getJobTrackers();
    Assert.assertEquals( "Job trackers count is correct: " + res.assertMessage, res.jobTrackerStatus.length,
      trackers.size() );

    for ( int i = 0; i < res.jobTrackerStatus.length; i++ ) {
      JobTracker record = trackers.get( i );
      Boolean actual;
      JobEntryResult jer = record.getJobEntryResult();
      // don't look into nested JobTrackers
      if ( jer == null ) {
        actual = null;
      } else {
        actual = record.getJobEntryResult().getResult() == null ? null
          : Boolean.valueOf( record.getJobEntryResult().getResult().getResult() );
      }
      Assert.assertEquals( res.assertMessage + ": " + i, res.jobTrackerStatus[i], actual );
    }
  } finally {
    System.getProperties().remove( DatabaseLogExceptionFactory.KETTLE_GLOBAL_PROP_NAME );
  }
}