Java Code Examples for org.pentaho.di.core.vfs.KettleVFS#getInputStream()

The following examples show how to use org.pentaho.di.core.vfs.KettleVFS#getInputStream(). They are taken from open source projects; the source file and project for each example are noted in the heading above it.
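As a minimal sketch of the pattern most of the examples below follow (the class name, file location, and the use of commons-io IOUtils are illustrative assumptions, not taken from any one example), the stream returned by KettleVFS.getInputStream() can be consumed and closed with try-with-resources:

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.vfs.KettleVFS;

public class KettleVfsReadSketch {

  /**
   * Reads the content of a VFS location as a UTF-8 string, or returns null if it cannot be read.
   */
  public static String readAsString( String vfsLocation ) {
    // KettleVFS resolves the location through Apache Commons VFS, so plain local paths
    // as well as VFS URLs (file:// and other registered schemes) are accepted.
    try ( InputStream is = KettleVFS.getInputStream( vfsLocation ) ) {
      return IOUtils.toString( is, StandardCharsets.UTF_8 );
    } catch ( KettleFileException | IOException e ) {
      // The examples below handle failures in different ways (return null, log, rethrow);
      // this sketch simply returns null on any error.
      return null;
    }
  }
}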
Example 1
Source File: SwtSvgImageUtil.java    From pentaho-kettle with Apache License 2.0
/**
 * Internal image loading from Kettle's VFS.
 */
private static SwtUniversalImage loadFromSimpleVFS( Display display, String location ) {
  try {
    InputStream s = KettleVFS.getInputStream( location );
    if ( s == null ) {
      return null;
    }
    try {
      return loadImage( display, s, location );
    } finally {
      IOUtils.closeQuietly( s );
    }
  } catch ( KettleFileException e ) {
    // do nothing. try to load next
  }
  return null;
}
 
Example 2
Source File: SwtSvgImageUtil.java    From pentaho-kettle with Apache License 2.0
/**
 * Internal image loading from Kettle's user.dir VFS.
 */
private static SwtUniversalImage loadFromBasedVFS( Display display, String location ) {
  try {
    FileObject imageFileObject = KettleVFS.getInstance().getFileSystemManager().resolveFile( base, location );
    InputStream s = KettleVFS.getInputStream( imageFileObject );
    if ( s == null ) {
      return null;
    }
    try {
      return loadImage( display, s, location );
    } finally {
      IOUtils.closeQuietly( s );
    }
  } catch ( FileSystemException ex ) {
    return null;
  }
}
 
Example 3
Source File: ImageUtil.java    From pentaho-kettle with Apache License 2.0
/**
 * TODO: GUI resources.
 */
public static Image getImage( Display display, String location ) {
  // TODO: find other instances of getImage (plugin, steps) and transition them to the new model through a LAF manager
  try {
    InputStream is = KettleVFS.getInputStream( location );
    Image im = new Image( display, is );
    is.close();
    return im;
  } catch ( Exception e ) {
    try {
      return new Image( display, ImageUtil.class.getClassLoader().getResourceAsStream( location ) );
    } catch ( Exception npe ) {
      throw new RuntimeException( "Unable to load image with name [" + location + "]", e );
    }
  }

}
 
Example 4
Source File: YamlReader.java    From pentaho-kettle with Apache License 2.0
public void loadFile( String filename ) throws Exception {
  this.filename = filename;
  this.file = KettleVFS.getFileObject( filename );

  InputStream is = null;
  try {
    is = KettleVFS.getInputStream( getFile() );

    for ( Object data : getYaml().loadAll( is ) ) {
      documents.add( data );
      this.useMap = ( data instanceof Map );
    }

    this.documenti = documents.iterator();

  } finally {
    if ( is != null ) {
      is.close();
    }
  }
}
 
Example 5
Source File: CsvInputDialog.java    From pentaho-kettle with Apache License 2.0
/**
 * Returns the {@link InputStream} corresponding to the csv file, or null if the file cannot be read.
 * @return the {@link InputStream} corresponding to the csv file, or null if the file cannot be read
 */
private InputStream getInputStream( final CsvInputMeta meta ) {
  InputStream inputStream = null;
  try {
    final String filename = transMeta.environmentSubstitute( meta.getFilename() );

    final FileObject fileObject = KettleVFS.getFileObject( filename );
    if ( !( fileObject instanceof LocalFile ) ) {
      // We can only use NIO on local files at the moment, so that's what we
      // limit ourselves to.
      //
      throw new KettleException( BaseMessages.getString( PKG, "CsvInput.Log.OnlyLocalFilesAreSupported" ) );
    }

    inputStream = KettleVFS.getInputStream( fileObject );
  } catch ( final Exception e ) {
    logError( BaseMessages.getString( PKG, "CsvInputDialog.ErrorGettingFileDesc.DialogMessage" ), e );
  }
  return inputStream;
}
 
Example 6
Source File: SlaveServer.java    From pentaho-kettle with Apache License 2.0
/**
 * Send an exported archive over to this slave server
 *
 * @param filename The archive to send
 * @param type     The type of file to add to the slave server (AddExportServlet.TYPE_*)
 * @param load     The filename to load in the archive (the .kjb or .ktr)
 * @return the XML of the web result
 * @throws Exception in case something goes awry
 */
public String sendExport( String filename, String type, String load ) throws Exception {
  // Request content will be retrieved directly from the input stream
  try ( InputStream is = KettleVFS.getInputStream( KettleVFS.getFileObject( filename ) ) ) {
    // Execute request
    HttpPost method = buildSendExportMethod( type, load, is );
    try {
      return executeAuth( method );
    } finally {
      // Release current connection to the connection pool once you are done
      method.releaseConnection();
      if ( log.isDetailed() ) {
        log.logDetailed( BaseMessages.getString( PKG, "SlaveServer.DETAILED_SentExportToService",
            RegisterPackageServlet.CONTEXT_PATH, environmentSubstitute( hostname ) ) );
      }
    }
  }
}
 
Example 7
Source File: SwingSvgImageUtil.java    From pentaho-kettle with Apache License 2.0
/**
 * Internal image loading from Kettle's user.dir VFS.
 */
private static SwingUniversalImage loadFromBasedVFS( String location ) {
  try {
    FileObject imageFileObject = KettleVFS.getInstance().getFileSystemManager().resolveFile( base, location );
    InputStream s = KettleVFS.getInputStream( imageFileObject );
    if ( s == null ) {
      return null;
    }
    try {
      return loadImage( s, location );
    } finally {
      IOUtils.closeQuietly( s );
    }
  } catch ( FileSystemException ex ) {
    return null;
  }
}
 
Example 8
Source File: SwingSvgImageUtil.java    From pentaho-kettle with Apache License 2.0
/**
 * Internal image loading from Kettle's VFS.
 */
private static SwingUniversalImage loadFromSimpleVFS( String location ) {
  try {
    InputStream s = KettleVFS.getInputStream( location );
    if ( s == null ) {
      return null;
    }
    try {
      return loadImage( s, location );
    } finally {
      IOUtils.closeQuietly( s );
    }
  } catch ( KettleFileException e ) {
    // do nothing. try to load next
  }
  return null;
}
 
Example 9
Source File: TextFileInputIT.java    From pentaho-kettle with Apache License 2.0
/**
 * Verify that lines are properly identified when parsing a mixed format file.
 */
public void testGetLine_FILE_FORMAT_MIXED() throws Exception {
  String fileLocation = "src/it/resources/example.csv";
  InputStream inputStream = KettleVFS.getInputStream( fileLocation );
  InputStreamReader reader = new InputStreamReader( inputStream );
  // Grab the first line and verify it only has 4 tokens instead of 24 (the total tokens in the file)
  StringBuilder stringBuilder = new StringBuilder( 1000 );
  String line = TextFileInput.getLine( null, reader, TextFileInputMeta.FILE_FORMAT_MIXED, stringBuilder );
  CSVTokenizer csvt = new CSVTokenizer( line, ",", "\"" );
  assertEquals( 4, csvt.countTokens() );
}
 
Example 10
Source File: TextFileInputDialog.java    From pentaho-kettle with Apache License 2.0
@Override
public InputStream getInputStream( final CsvInputAwareMeta meta ) {
  InputStream fileInputStream;
  CompressionInputStream inputStream = null;
  try {
    FileObject fileObject = meta.getHeaderFileObject( getTransMeta() );
    fileInputStream = KettleVFS.getInputStream( fileObject );
    CompressionProvider provider = CompressionProviderFactory.getInstance().createCompressionProviderInstance(
      ( (TextFileInputMeta) meta ).content.fileCompression );
    inputStream = provider.createInputStream( fileInputStream );
  } catch ( final Exception e ) {
    logError( BaseMessages.getString( "FileInputDialog.ErrorGettingFileDesc.DialogMessage" ), e );
  }
  return inputStream;
}
 
Example 11
Source File: CubeInput.java    From pentaho-kettle with Apache License 2.0
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (CubeInputMeta) smi;
  data = (CubeInputData) sdi;

  if ( super.init( smi, sdi ) ) {
    try {
      String filename = environmentSubstitute( meta.getFilename() );

      // Add filename to result filenames ?
      if ( meta.isAddResultFile() ) {
        ResultFile resultFile =
          new ResultFile(
            ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( filename, getTransMeta() ),
            getTransMeta().getName(), toString() );
        resultFile.setComment( "File was read by a Cube Input step" );
        addResultFile( resultFile );
      }

      data.fis = KettleVFS.getInputStream( filename, this );
      data.zip = new GZIPInputStream( data.fis );
      data.dis = new DataInputStream( data.zip );

      try {
        data.meta = new RowMeta( data.dis );
        return true;
      } catch ( KettleFileException kfe ) {
        logError( BaseMessages.getString( PKG, "CubeInput.Log.UnableToReadMetadata" ), kfe );
        return false;
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "CubeInput.Log.ErrorReadingFromDataCube" ), e );
    }
  }
  return false;
}
 
Example 12
Source File: ExcelWriterStep.java    From pentaho-kettle with Apache License 2.0
/**
 * Copies a VFS File
 *
 * @param in  the source file object
 * @param out the destination file object
 * @throws KettleException
 */
public static void copyFile( FileObject in, FileObject out ) throws KettleException {
  try ( BufferedInputStream fis = new BufferedInputStream( KettleVFS.getInputStream( in ) );
        BufferedOutputStream fos = new BufferedOutputStream( KettleVFS.getOutputStream( out, false ) ) ) {
    byte[] buf = new byte[ 1024 * 1024 ]; // copy in chunks of 1 MB
    int i = 0;
    while ( ( i = fis.read( buf ) ) != -1 ) {
      fos.write( buf, 0, i );
    }
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
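A hypothetical call to the method above (the workbook locations are placeholders; catching KettleException covers both KettleVFS.getFileObject() and copyFile()):

try {
  FileObject template = KettleVFS.getFileObject( "template.xlsx" );
  FileObject report = KettleVFS.getFileObject( "report.xlsx" );
  // copyFile() opens the source with KettleVFS.getInputStream() and writes the target
  // with KettleVFS.getOutputStream( out, false ), i.e. without appending.
  ExcelWriterStep.copyFile( template, report );
} catch ( KettleException e ) {
  // handle or rethrow as appropriate for the calling step
}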
 
Example 13
Source File: XBaseInput.java    From pentaho-kettle with Apache License 2.0
private void openNextFile() throws KettleException {
  // Close the last file before opening the next...
  if ( data.xbi != null ) {
    logBasic( BaseMessages.getString( PKG, "XBaseInput.Log.FinishedReadingRecords" ) );
    data.xbi.close();
  }

  // Replace possible environment variables...
  data.file_dbf = data.files.getFile( data.fileNr );
  data.fileNr++;

  try {
    data.xbi = new XBase( log, KettleVFS.getInputStream( data.file_dbf ) );
    data.xbi.setDbfFile( data.file_dbf.getName().getURI() );
    data.xbi.open();
    if ( !Utils.isEmpty( meta.getCharactersetName() ) ) {
      data.xbi.getReader().setCharactersetName( meta.getCharactersetName() );
    }

    logBasic( BaseMessages.getString( PKG, "XBaseInput.Log.OpenedXBaseFile" ) + " : [" + data.xbi + "]" );
    data.fields = data.xbi.getFields();

    // Add this to the result file names...
    ResultFile resultFile =
      new ResultFile( ResultFile.FILE_TYPE_GENERAL, data.file_dbf, getTransMeta().getName(), getStepname() );
    resultFile.setComment( BaseMessages.getString( PKG, "XBaseInput.ResultFile.Comment" ) );
    addResultFile( resultFile );
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "XBaseInput.Log.Error.CouldNotOpenXBaseFile1" )
      + data.file_dbf + BaseMessages.getString( PKG, "XBaseInput.Log.Error.CouldNotOpenXBaseFile2" )
      + e.getMessage() );
    throw new KettleException( e );
  }
}
 
Example 14
Source File: SharedObjects.java    From pentaho-kettle with Apache License 2.0
private boolean copyFile( String src, String dest ) throws KettleFileException, IOException {
  FileObject srcFile = getFileObjectFromKettleVFS( src );
  FileObject destFile = getFileObjectFromKettleVFS( dest );
  try ( InputStream in = KettleVFS.getInputStream( srcFile );
      OutputStream out = KettleVFS.getOutputStream( destFile, false ) ) {
    IOUtils.copy( in, out );
  }
  return true;
}
 
Example 15
Source File: JobEntrySQL.java    From pentaho-kettle with Apache License 2.0
public String buildSqlFromFile() throws KettleDatabaseException {
  if ( sqlFilename == null ) {
    throw new KettleDatabaseException( BaseMessages.getString( PKG, "JobSQL.NoSQLFileSpecified" ) );
  }

  String realFilename = environmentSubstitute( sqlFilename );
  try ( FileObject sqlFile = KettleVFS.getFileObject( realFilename, this ) ) {
    if ( !sqlFile.exists() ) {
      logError( BaseMessages.getString( PKG, "JobSQL.SQLFileNotExist", realFilename ) );
      throw new KettleDatabaseException( BaseMessages.getString(
        PKG, "JobSQL.SQLFileNotExist", realFilename ) );
    }
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobSQL.SQLFileExists", realFilename ) );
    }

    try ( InputStream inputStream = KettleVFS.getInputStream( sqlFile ) ) {
      InputStreamReader bufferedStream = new InputStreamReader( new BufferedInputStream( inputStream, 500 ) );

      BufferedReader buff = new BufferedReader( bufferedStream );
      String sLine;
      StringBuilder sqlBuilder = new StringBuilder( Const.CR );

      while ( ( sLine = buff.readLine() ) != null ) {
        if ( Utils.isEmpty( sLine ) ) {
          sqlBuilder.append( Const.CR );
        } else {
          sqlBuilder.append( Const.CR ).append( sLine );
        }
      }
      return sqlBuilder.toString();
    }
  } catch ( Exception e ) {
    throw new KettleDatabaseException( BaseMessages.getString( PKG, "JobSQL.ErrorRunningSQLfromFile" ), e );
  }
}
 
Example 16
Source File: DataSetCsvGroup.java    From pentaho-pdi-dataset with Apache License 2.0
public static final List<Object[]> getAllRows( LogChannelInterface log, DataSetGroup group, DataSet dataSet ) throws KettleException {
  RowMetaInterface setRowMeta = dataSet.getSetRowMeta( true );
  setValueFormats( setRowMeta );
  String dataSetFilename = getDataSetFilename( group, dataSet.getTableName() );
  List<Object[]> rows = new ArrayList<>();
  final ValueMetaString constantValueMeta = new ValueMetaString( "constant" );

  try {
    FileObject file = KettleVFS.getFileObject( dataSetFilename );
    if ( !file.exists() ) {
      // This is fine.  We haven't put rows in yet.
      //
      return rows;
    }

    try (
      Reader reader = new InputStreamReader( new BufferedInputStream( KettleVFS.getInputStream( file ) ) );
      CSVParser csvParser = new CSVParser( reader, getCsvFormat( setRowMeta ) );
    ) {
      for ( CSVRecord csvRecord : csvParser ) {
        if ( csvRecord.getRecordNumber() > 1 ) {
          Object[] row = RowDataUtil.allocateRowData( setRowMeta.size() );
          for ( int i = 0; i < setRowMeta.size(); i++ ) {
            ValueMetaInterface valueMeta = setRowMeta.getValueMeta( i ).clone();
            constantValueMeta.setConversionMetadata( valueMeta );
            String value = csvRecord.get( i );
            row[ i ] = valueMeta.convertData( constantValueMeta, value );
          }
          rows.add( row );
        }
      }
    }
    return rows;
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get all rows for CSV data set '" + dataSet.getName() + "'", e );
  }
}
 
Example 17
Source File: GetFilesRowsCount.java    From pentaho-kettle with Apache License 2.0
private void getRowNumber() throws KettleException {
  try {

    if ( data.file.getType() == FileType.FILE ) {
      data.fr = KettleVFS.getInputStream( data.file );
      // Avoid method calls - see here:
      // http://java.sun.com/developer/technicalArticles/Programming/PerfTuning/
      byte[] buf = new byte[8192]; // BufferedInputStream default buffer size
      int n;
      boolean prevCR = false;
      while ( ( n = data.fr.read( buf ) ) != -1 ) {
        for ( int i = 0; i < n; i++ ) {
          data.foundData = true;
          if ( meta.getRowSeparatorFormat().equals( "CRLF" ) ) {
            // We need to check for CRLF
            if ( buf[i] == '\r' || buf[i] == '\n' ) {
              if ( buf[i] == '\r' ) {
                // we have a carriage return
                // keep track of it..maybe we will have a line feed right after :-)
                prevCR = true;
              } else if ( buf[i] == '\n' ) {
                // we have a line feed
                // let's see if we had previously a carriage return
                if ( prevCR ) {
                  // we have a carriage return followed by a line feed
                  data.rownr++;
                  // Maybe we won't have data after
                  data.foundData = false;
                  prevCR = false;
                }
              }
            } else {
              // we have another char (other than \n , \r)
              prevCR = false;
            }

          } else {
            if ( buf[i] == data.separator ) {
              data.rownr++;
              // Maybe we won't have data after
              data.foundData = false;
            }
          }
        }
      }
    }
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "GetFilesRowsCount.Log.RowsInFile", data.file.toString(), ""
        + data.rownr ) );
    }
  } catch ( Exception e ) {
    throw new KettleException( e );
  } finally {
    // Close inputstream - not used except for counting
    if ( data.fr != null ) {
      BaseStep.closeQuietly( data.fr );
      data.fr = null;
    }
  }

}
 
Example 18
Source File: LogWriter.java    From pentaho-kettle with Apache License 2.0
/**
 * This is not thread safe: please try to get the file appender yourself using the static constructor and work from there
 */
public InputStream getFileInputStream() throws IOException {
  return KettleVFS.getInputStream( fileAppender.getFile() );
}
 
Example 19
Source File: LoopNodesImportProgressDialog.java    From pentaho-kettle with Apache License 2.0
@SuppressWarnings( "unchecked" )
private String[] doScan( IProgressMonitor monitor ) throws Exception {
  monitor.beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.ScanningFile",
      filename ), 1 );

  SAXReader reader = XMLParserFactoryProducer.getSAXReader( null );
  monitor.worked( 1 );
  if ( monitor.isCanceled() ) {
    return null;
  }
  // Validate XML against specified schema?
  if ( meta.isValidating() ) {
    reader.setValidation( true );
    reader.setFeature( "http://apache.org/xml/features/validation/schema", true );
  } else {
    // Ignore DTD
    reader.setEntityResolver( new IgnoreDTDEntityResolver() );
  }
  monitor.worked( 1 );
  monitor
      .beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.ReadingDocument" ), 1 );
  if ( monitor.isCanceled() ) {
    return null;
  }
  InputStream is = null;
  try {
    Document document = null;
    if ( !Utils.isEmpty( filename ) ) {
      is = KettleVFS.getInputStream( filename );
      document = reader.read( is, encoding );
    } else {
      if ( !Utils.isEmpty( xml ) ) {
        document = reader.read( new StringReader( xml ) );
      } else {
        document = reader.read( new URL( url ) );
      }
    }
    monitor.worked( 1 );
    monitor.beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.DocumentOpened" ),
        1 );
    monitor.worked( 1 );
    monitor.beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.ReadingNode" ), 1 );

    if ( monitor.isCanceled() ) {
      return null;
    }
    List<Node> nodes = document.selectNodes( document.getRootElement().getName() );
    monitor.worked( 1 );
    monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.FetchNodes" ) );

    if ( monitor.isCanceled() ) {
      return null;
    }
    for ( Node node : nodes ) {
      if ( monitor.isCanceled() ) {
        return null;
      }
      if ( !listpath.contains( node.getPath() ) ) {
        nr++;
        monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.FetchNodes",
            String.valueOf( nr ) ) );
        monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.AddingNode", node
            .getPath() ) );
        listpath.add( node.getPath() );
        addLoopXPath( node, monitor );
      }
    }
    monitor.worked( 1 );
  } finally {
    try {
      if ( is != null ) {
        is.close();
      }
    } catch ( Exception e ) { /* Ignore */
    }
  }
  String[] list_xpath = listpath.toArray( new String[listpath.size()] );

  monitor.setTaskName( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.NodesReturned" ) );

  monitor.done();

  return list_xpath;

}
 
Example 20
Source File: XMLInputFieldsImportProgressDialog.java    From pentaho-kettle with Apache License 2.0
@SuppressWarnings( "unchecked" )
private RowMetaAndData[] doScan( IProgressMonitor monitor ) throws Exception {
  monitor.beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.ScanningFile",
      filename ), 1 );

  SAXReader reader = XMLParserFactoryProducer.getSAXReader( null );
  monitor.worked( 1 );
  if ( monitor.isCanceled() ) {
    return null;
  }
  // Validate XML against specified schema?
  if ( meta.isValidating() ) {
    reader.setValidation( true );
    reader.setFeature( "http://apache.org/xml/features/validation/schema", true );
  } else {
    // Ignore DTD
    reader.setEntityResolver( new IgnoreDTDEntityResolver() );
  }
  monitor.worked( 1 );
  monitor
      .beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.ReadingDocument" ), 1 );
  if ( monitor.isCanceled() ) {
    return null;
  }
  InputStream is = null;
  try {

    Document document = null;
    if ( !Utils.isEmpty( filename ) ) {
      is = KettleVFS.getInputStream( filename );
      document = reader.read( is, encoding );
    } else {
      if ( !Utils.isEmpty( xml ) ) {
        document = reader.read( new StringReader( xml ) );
      } else {
        document = reader.read( new URL( url ) );
      }
    }

    monitor.worked( 1 );
    monitor.beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.DocumentOpened" ),
        1 );
    monitor.worked( 1 );
    monitor.beginTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.ReadingNode" ), 1 );

    if ( monitor.isCanceled() ) {
      return null;
    }
    List<Node> nodes = document.selectNodes( this.loopXPath );
    monitor.worked( 1 );
    monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.FetchNodes" ) );

    if ( monitor.isCanceled() ) {
      return null;
    }
    for ( Node node : nodes ) {
      if ( monitor.isCanceled() ) {
        return null;
      }

      nr++;
      monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.FetchNodes", String
          .valueOf( nr ) ) );
      monitor.subTask( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.FetchNodes", node
          .getPath() ) );
      setNodeField( node, monitor );
      childNode( node, monitor );

    }
    monitor.worked( 1 );
  } finally {
    try {
      if ( is != null ) {
        is.close();
      }
    } catch ( Exception e ) { /* Ignore */
    }
  }

  RowMetaAndData[] listFields = fieldsList.toArray( new RowMetaAndData[fieldsList.size()] );

  monitor.setTaskName( BaseMessages.getString( PKG, "GetXMLDateLoopNodesImportProgressDialog.Task.NodesReturned" ) );

  monitor.done();

  return listFields;

}