Java Code Examples for org.apache.commons.io.FilenameUtils#concat()

The following examples show how to use org.apache.commons.io.FilenameUtils#concat(). Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
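Before diving into the examples, it may help to recall concat()'s contract: it joins a base path and a relative path, normalizes the result (resolving "." and ".." segments), returns the second argument on its own when it is already absolute, and returns null when the combined path cannot be normalized. A minimal sketch of these rules follows; the printed separators assume a Unix-style platform, since concat() uses the system separator.

import org.apache.commons.io.FilenameUtils;

public class FilenameUtilsConcatDemo {
    public static void main(String[] args) {
        // Plain join of a base directory and a relative name
        System.out.println(FilenameUtils.concat("/data/output", "results.csv"));    // /data/output/results.csv

        // The result is normalized: ".." segments are resolved
        System.out.println(FilenameUtils.concat("/data/output", "../archive.csv")); // /data/archive.csv

        // An absolute second argument replaces the base path entirely
        System.out.println(FilenameUtils.concat("/data/output", "/etc/config"));    // /etc/config

        // A path that climbs above the root cannot be normalized and yields null
        System.out.println(FilenameUtils.concat("/data", "../../nowhere"));         // null
    }
}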
Example 1
Source File: CifarLoader.java    From deeplearning4j with Apache License 2.0
public CifarLoader(int height, int width, int channels, ImageTransform imgTransform, boolean train,
                boolean useSpecialPreProcessCifar, File fullDir, long seed, boolean shuffle) {
    super(height, width, channels, imgTransform);
    this.height = height;
    this.width = width;
    this.channels = channels;
    this.train = train;
    this.useSpecialPreProcessCifar = useSpecialPreProcessCifar;
    this.seed = seed;
    this.shuffle = shuffle;

    if (fullDir == null) {
        this.fullDir = getDefaultDirectory();
    } else {
        this.fullDir = fullDir;
    }
    meanVarPath = new File(this.fullDir, "meanVarPath.txt");
    trainFilesSerialized = FilenameUtils.concat(this.fullDir.toString(), "cifar_train_serialized");
    testFilesSerialized = FilenameUtils.concat(this.fullDir.toString(), "cifar_test_serialized.ser");

    load();
}
 
Example 2
Source File: ExtractorUtils.java    From gemfirexd-oss with Apache License 2.0
/**
 * Gets the size of the largest disk-store in a directory.
 * @param diskStores list of disk-stores
 * @param diskStoreDirPath path of the directory where the disk-stores are created
 * @return size of the largest disk-store
 */
public static long getMaxDiskStoreSizeInDir(List<DiskStoreImpl> diskStores, String diskStoreDirPath) {
  File diskStoreDir = new File(diskStoreDirPath);
  long maxDiskStoreSizeOnDisk = 0;
  for (DiskStoreImpl diskStore : diskStores) {
    String[] fileNames = diskStoreDir.list(new DiskStoreNameFilter(diskStore.getName()));
    long diskStoreSize = 0;

    if (fileNames != null && fileNames.length > 0) {
      for (String fileName : fileNames) {
        File file = new File(FilenameUtils.concat(diskStoreDirPath, fileName));
        if (file.exists()) {
          diskStoreSize += FileUtils.sizeOf(file); 
        }
      }
    }

    if (maxDiskStoreSizeOnDisk < diskStoreSize) {
      maxDiskStoreSizeOnDisk = diskStoreSize;
    }
  }
  return maxDiskStoreSizeOnDisk;
}
 
Example 3
Source File: XdsIniFile.java    From xds-ide with Eclipse Public License 1.0
XdsIniFile(Sdk sdk) {
    content = new HashMap<String, HashMap<String, String>>();

    String compiler = sdk.getCompilerExecutablePath();

    String dir = null;
    if (compiler != null) {
        dir = (new File(compiler)).getParent();
    }
    if (dir == null) {
        dir = FilenameUtils.concat(sdk.getSdkHomePath(), "bin"); //$NON-NLS-1$
    }
    xdsdir = dir;

    xdsname = (compiler == null)
            ? ""  //$NON-NLS-1$
            : FilenameUtils.getBaseName(compiler);
    readXdsIniFile(FilenameUtils.getFullPath(compiler));
}
 
Example 4
Source File: SparkExport.java    From deeplearning4j with Apache License 2.0
public static void exportCSVSequenceLocal(File baseDir, JavaRDD<List<List<Writable>>> sequences, long seed)
                throws Exception {
    baseDir.mkdirs();
    if (!baseDir.isDirectory())
        throw new IllegalArgumentException("File is not a directory: " + baseDir.toString());
    String baseDirStr = baseDir.toString();

    List<String> fileContents = sequences.map(new SequenceToStringFunction(",")).collect();
    if (!(fileContents instanceof ArrayList))
        fileContents = new ArrayList<>(fileContents);
    Collections.shuffle(fileContents, new Random(seed));

    int i = 0;
    for (String s : fileContents) {
        String path = FilenameUtils.concat(baseDirStr, i + ".csv");
        File f = new File(path);
        FileUtils.writeStringToFile(f, s);
        i++;
    }
}
 
Example 5
Source File: TestBeanFirmwareUpdate.java    From bean-sdk-android with MIT License
public FirmwareBundle getFirmwareBundle(String hardwareRevision) throws Exception {

    Log.i(TAG, "Finding firmware bundle for hardware version: " + hardwareRevision);

    String bundlePath = bundlePathForHardwareRevision(hardwareRevision);
    List<FirmwareImage> fwImages = new ArrayList<>();
    for (String imageFileName : filesInAssetDir(getContext(), bundlePath)) {
        String imagePath = FilenameUtils.concat(bundlePath, imageFileName);
        try {
            InputStream imageStream = getContext().getAssets().open(imagePath);
            FirmwareImage image = new FirmwareImage(IOUtils.toByteArray(imageStream), imageFileName);
            fwImages.add(image);
        } catch (IOException | ImageParsingException e) {
            throw new Exception(e.getMessage());
        }
    }

    FirmwareBundle bundle = new FirmwareBundle(fwImages);
    Log.i(TAG, "Found firmware bundle: " + bundle.version());
    return bundle;
}
 
Example 6
Source File: SparkExport.java    From DataVec with Apache License 2.0
public static void exportCSVSequenceLocalNoShuffling(File baseDir, JavaRDD<List<List<Writable>>> sequences,
                String delimiter, String filePrefix, String fileExtension) throws Exception {
    baseDir.mkdirs();
    if (!baseDir.isDirectory())
        throw new IllegalArgumentException("File is not a directory: " + baseDir.toString());
    String baseDirStr = baseDir.toString();

    List<String> fileContents = sequences.map(new SequenceToStringFunction(delimiter)).collect();
    if (!(fileContents instanceof ArrayList))
        fileContents = new ArrayList<>(fileContents);

    int i = 0;
    for (String s : fileContents) {
        String path = FilenameUtils.concat(baseDirStr, filePrefix + "_" + i + "." + fileExtension);
        File f = new File(path);
        FileUtils.writeStringToFile(f, s);
        i++;
    }
}
 
Example 7
Source File: ConfigurationUtils.java    From gemfirexd-oss with Apache License 2.0
public static void writeConfig(String configDirPath, Configuration configuration, boolean forceWrite) throws Exception {
  File configDir = new File(configDirPath);
  if (!configDir.exists()) {
    configDir.mkdirs();
  }
  String dirPath = FilenameUtils.concat(configDirPath, configuration.getConfigName());
  File file = new File(dirPath);
  if (!file.exists()) {
    if (!file.mkdir()) {
      // TODO: throw a more specific exception type
      throw new Exception("Cannot create directory on this machine");
    }
  }

  if (configuration.isPropertiesFileChanged() || forceWrite) {
    writePropeties(dirPath, configuration);
  }
  if (configuration.isCacheXmlModified() || forceWrite) {
    writeCacheXml(dirPath, configuration);
  }
}
 
Example 8
Source File: GemFireXDDataExtractorImpl.java    From gemfirexd-oss with Apache License 2.0
public void consumeProperties() throws IOException {
  this.propFilePath = extractorProperties.getProperty(PROPERTY_FILE_ARG);
  this.outputDirectoryPath = extractorProperties.getProperty(EXTRACTOR_OUTPUT_DIR_OPT);
  this.logFilePath = FilenameUtils.concat(getOutputDirectory(), extractorProperties.getProperty(LOG_FILE_OPT, DEFAULT_SALVAGER_LOG_FILE));
  this.logLevelString = extractorProperties.getProperty(LOG_LEVEL_OPT, Level.INFO.getName());
  this.stringDelimiter = extractorProperties.getProperty(STRING_DELIMITER);
  this.extractInServerDir = Boolean.valueOf(extractorProperties.getProperty(EXTRACT_IN_SERVER_OPT));
  this.showHelp = Boolean.valueOf(extractorProperties.getProperty(HELP_OPT));
  String ddlFilePath = extractorProperties.getProperty(USE_DDL_OPT);
  if (ddlFilePath != null && !ddlFilePath.isEmpty()) {
    this.useSingleDDL = true;
  }
  this.userName = extractorProperties.getProperty(USER_NAME_OPT);

  String numThreads = extractorProperties.getProperty(NUM_THREADS_OPT);
  if (StringUtils.isBlank(numThreads)) {
    this.userOverrideNumThreads = false;
  } else {
    int userNumThreads = -1; 
    try {
      userNumThreads = Integer.valueOf(numThreads);
      this.userNumThreads = userNumThreads;
      this.userOverrideNumThreads = true;
      if (this.userNumThreads < 1) {
        this.userNumThreads = 1;
      }
    } catch (NumberFormatException nfe) {
      System.out.println("Invalid value for " + NUM_THREADS_OPT);
      this.userOverrideNumThreads = false;
    }
  }
}
 
Example 9
Source File: GemFireXDDataExtractorImpl.java    From gemfirexd-oss with Apache License 2.0
public Properties createConnectionProperties(boolean enableOffHeap) {
  String outputDir = ".";
  try {
    outputDir = getOutputDirectory();
  } catch (IOException e) {
    // fall back to the current directory if the output directory cannot be determined
  }

  String gemfirelog = FilenameUtils.concat(outputDir, "system");
  Properties props = new Properties();
  if (setPropertyIfAbsent(props, DistributionConfig.LOG_FILE_NAME, gemfirelog
      + ".log")) {
    // if no log-file property then also set the system property for
    // gemfirexd log-file that will also get used for JDBC clients
    setPropertyIfAbsent(null, GfxdConstants.GFXD_LOG_FILE, gemfirelog
        + ".log");
  }

  setPropertyIfAbsent(null, GfxdConstants.GFXD_CLIENT_LOG_FILE, gemfirelog
      + "-client.log");
  setPropertyIfAbsent(props, "mcast-port", "0");
  // set default partitioned policy if not set
  setPropertyIfAbsent(props, Attribute.TABLE_DEFAULT_PARTITIONED, "true");

  //Set the loner VM to use the off heap memory, to ensure creation of off heap table during DDL replay.
  if (enableOffHeap) {
    setPropertyIfAbsent(props, DistributionConfig.OFF_HEAP_MEMORY_SIZE_NAME, "1m");
  }

  if (!StringUtils.isEmpty(this.userName)) {
    setPropertyIfAbsent(props, Attribute.USERNAME_ATTR, userName);
  }
  return props;
}
 
Example 10
Source File: XdsPluginUpdater.java    From xds-ide with Eclipse Public License 1.0
private static Collection<File> getXdsPluginJars(UpdateDirDescriptor desc) {
	Collection<File> fileList = new ArrayList<File>();
	for (PluginUpdate pluginUpdate : desc.pluginUpdates) {
		File pluginFile = new File(FilenameUtils.concat(desc.updateDirPath, pluginUpdate.pluginLocation));
		fileList.add(pluginFile);
	}
	return fileList;
}
 
Example 11
Source File: RecordReaderDataSetiteratorTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSequenceRecordReaderReset() throws Exception {
    File rootDir = temporaryFolder.newFolder();
    //need to manually extract
    for (int i = 0; i < 3; i++) {
        FileUtils.copyFile(Resources.asFile(String.format("csvsequence_%d.txt", i)), new File(rootDir, String.format("csvsequence_%d.txt", i)));
        FileUtils.copyFile(Resources.asFile(String.format("csvsequencelabels_%d.txt", i)), new File(rootDir, String.format("csvsequencelabels_%d.txt", i)));
    }
    String featuresPath = FilenameUtils.concat(rootDir.getAbsolutePath(), "csvsequence_%d.txt");
    String labelsPath = FilenameUtils.concat(rootDir.getAbsolutePath(), "csvsequencelabels_%d.txt");

    SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");
    featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));

    SequenceRecordReaderDataSetIterator iter =
                    new SequenceRecordReaderDataSetIterator(featureReader, labelReader, 1, 4, false);

    assertEquals(3, iter.inputColumns());
    assertEquals(4, iter.totalOutcomes());

    int nResets = 5;
    for (int i = 0; i < nResets; i++) {
        iter.reset();
        int count = 0;
        while (iter.hasNext()) {
            DataSet ds = iter.next();
            INDArray features = ds.getFeatures();
            INDArray labels = ds.getLabels();
            assertArrayEquals(new long[] {1, 3, 4}, features.shape());
            assertArrayEquals(new long[] {1, 4, 4}, labels.shape());
            count++;
        }
        assertEquals(3, count);
    }
}
 
Example 12
Source File: WkHtmlToPdfExecutor.java    From website with GNU Affero General Public License v3.0
public File execute(String htmlLocation, String cssFile) throws IOException, InterruptedException {
    String newPdfFilename = getNewPdfFilename(htmlLocation);
    if (!StringUtils.startsWith(htmlLocation, "http")) {
        htmlLocation = FilenameUtils.concat(htmlFolder, htmlLocation);
        File f = new File(htmlLocation);
        if (!f.exists()) {
            throw new IOException("File '" + htmlLocation + "' not found");
        }
        htmlLocation = "file:///" + htmlLocation;
    }
    //String[] cmd = { wkhtmltopdfExecutableLocation, "--user-style-sheet", "\"file:///" + FilenameUtils.concat(htmlToPdfCssFolder, cssFile) + "\"", htmlLocation, newPdfFilename };
    //String cmd = wkhtmltopdfExecutableLocation + " --user-style-sheet " + "\"" + FilenameUtils.concat(htmlToPdfCssFolder, cssFile) + "\" " + htmlLocation + " " + newPdfFilename;
    String cmd = wkhtmltopdfExecutableLocation + " " + FilenameUtils.concat(htmlToPdfCssFolder, cssFile) + " " + htmlLocation + " " + newPdfFilename;

    logger.debug(cmd);
    Runtime runtime = Runtime.getRuntime();
    Process p = runtime.exec(cmd);
    BufferedReader input = new BufferedReader(new InputStreamReader(p.getErrorStream()));

    String line = null;

    List<String> output = new ArrayList<String>();
    while ((line = input.readLine()) != null) {
        output.add(line);
    }

    int exitVal = p.waitFor();
    if (exitVal != 0) {
        throw new IOException(output.get(output.size() - 1));
    }
    return new File(newPdfFilename);
}
 
Example 13
Source File: TestBeanAdvanced.java    From bean-sdk-android with MIT License
@Suppress
public void testBeanSketchUpload() throws Exception {
    final Bean bean = discoverBean();
    synchronousConnect(bean);
    String hwVersion = getDeviceInformation(bean).hardwareVersion();

    String hexPath = null;
    for (String filename : filesInAssetDir(getContext(), "bean_fw_advanced_callbacks")) {
        if (FilenameUtils.getExtension(filename).equals("hex")) {
            String[] pieces = FilenameUtils.getBaseName(filename).split("_");
            String hexHW = pieces[pieces.length - 1];
            if (hexHW.equals(hwVersion)) {
                hexPath = FilenameUtils.concat("bean_fw_advanced_callbacks", filename);
                break;
            }
        }
    }

    assertThat(hexPath).isNotNull();
    InputStream imageStream = getContext().getAssets().open(hexPath);
    StringWriter writer = new StringWriter();
    IOUtils.copy(imageStream, writer);

    String timestamp = Long.toString(System.currentTimeMillis() / 1000);
    SketchHex sketchHex = SketchHex.create(timestamp, writer.toString());

    final CountDownLatch sketchLatch = new CountDownLatch(1);
    Callback<UploadProgress> onProgress = new Callback<UploadProgress>() {
        @Override
        public void onResult(UploadProgress result) {
            System.out.println("On Result: " + result);
        }
    };

    Runnable onComplete = new Runnable() {
        @Override
        public void run() {
            System.out.println("all done!");
            sketchLatch.countDown();
        }
    };

    bean.programWithSketch(sketchHex, onProgress, onComplete);
    sketchLatch.await(120, TimeUnit.SECONDS);

    SketchMetadata metadata = getSketchMetadata(bean);
    if (!metadata.hexName().equals(timestamp)) {
        fail(String.format("Unexpected Sketch name: %s != %s", metadata.hexName(), timestamp));
    }
}
 
Example 14
Source File: TestExport.java    From deeplearning4j with Apache License 2.0
@Test
public void testBatchAndExportDataSetsFunction() throws Exception {
    String baseDir = System.getProperty("java.io.tmpdir");
    baseDir = FilenameUtils.concat(baseDir, "dl4j_spark_testBatchAndExport/");
    baseDir = baseDir.replaceAll("\\\\", "/");
    File f = new File(baseDir);
    if (f.exists())
        FileUtils.deleteDirectory(f);
    f.mkdir();
    f.deleteOnExit();
    int minibatchSize = 5;
    int nIn = 4;
    int nOut = 3;

    List<DataSet> dataSets = new ArrayList<>();
    dataSets.add(new DataSet(Nd4j.create(10, nIn), Nd4j.create(10, nOut))); //Larger than minibatch size -> tests splitting
    for (int i = 0; i < 98; i++) {
        if (i % 2 == 0) {
            dataSets.add(new DataSet(Nd4j.create(5, nIn), Nd4j.create(5, nOut)));
        } else {
            dataSets.add(new DataSet(Nd4j.create(1, nIn), Nd4j.create(1, nOut)));
            dataSets.add(new DataSet(Nd4j.create(1, nIn), Nd4j.create(1, nOut)));
            dataSets.add(new DataSet(Nd4j.create(3, nIn), Nd4j.create(3, nOut)));
        }
    }

    Collections.shuffle(dataSets, new Random(12345));

    JavaRDD<DataSet> rdd = sc.parallelize(dataSets);
    rdd = rdd.repartition(1); //For testing purposes (should get exactly 100 out, but maybe more with more partitions)

    JavaRDD<String> pathsRdd = rdd.mapPartitionsWithIndex(
                    new BatchAndExportDataSetsFunction(minibatchSize, "file:///" + baseDir), true);

    List<String> paths = pathsRdd.collect();
    assertEquals(100, paths.size());

    File[] files = f.listFiles();
    assertNotNull(files);

    int count = 0;
    for (File file : files) {
        if (!file.getPath().endsWith(".bin"))
            continue;
        //System.out.println(file);
        DataSet ds = new DataSet();
        ds.load(file);
        assertEquals(minibatchSize, ds.numExamples());

        count++;
    }

    assertEquals(100, count);

    FileUtils.deleteDirectory(f);
}
 
Example 15
Source File: TextMessageReaderTest.java    From singer with Apache License 2.0
@Test
public void testSingleLineTextMessages() throws Exception {
  String path = FilenameUtils.concat(getTempPath(), "text.log");
  TextLogger logger = new TextLogger(path);
  TextMessageReader reader = new TextMessageReader(
      path, 8192, MAX_MESSAGE_SIZE, Pattern.compile("^"));

  // Write first part of the first line.
  logger.logText(SINGLE_LINE_MESSAGE_0[0]);
  // We do not have full line yet. Return null and offset should be 0.
  assertNull(reader.readMessage(false));
  assertEquals(0L, reader.getByteOffset());

  // Write last part of the first line. Now we have a full line.
  logger.logText(SINGLE_LINE_MESSAGE_0[1]);
  // We did not see the first line of the next message. Return null and offset should be 0.
  assertNull(reader.readMessage(false));
  assertEquals(0L, reader.getByteOffset());

  // Get offset of the first byte of second message.
  long byteOffsetOfStartOfSecondMessage = logger.getByteOffset();

  // Write first part of the second line.
  logger.logText(SINGLE_LINE_MESSAGE_1[0]);
  // We have not got a full line of the second message. Return null and offset should be 0.
  assertNull(reader.readMessage(false));
  assertEquals(0L, reader.getByteOffset());

  // Write second part of the second line.
  logger.logText(SINGLE_LINE_MESSAGE_1[1]);
  // We have not got a full line of the second message. Return null and offset should be 0.
  assertNull(reader.readMessage(false));
  assertEquals(0L, reader.getByteOffset());

  // Write last part of the second line. We have now seen the first line of the second message.
  // Return the first message and the offset of the start of the second message.
  logger.logText(SINGLE_LINE_MESSAGE_1[2]);
  assertEquals(Joiner.on("").join(SINGLE_LINE_MESSAGE_0), TextMessageReader.bufToString(reader.readMessage(false)));
  assertEquals(byteOffsetOfStartOfSecondMessage, reader.getByteOffset());

  reader.close();
}
 
Example 16
Source File: DotFloo.java    From floobits-intellij with Apache License 2.0
public static File path(String base_dir) {
    return new File(FilenameUtils.concat(base_dir, ".floo"));
}
 
Example 17
Source File: TimelineMetricAggregatorFactory.java    From ambari-metrics with Apache License 2.0
/**
 * Minute aggregation for cluster.
 * Interval: 5 mins
 */
public static TimelineMetricAggregator createTimelineClusterAggregatorMinute(
  PhoenixHBaseAccessor hBaseAccessor, Configuration metricsConf,
  TimelineMetricMetadataManager metadataManager,
  MetricCollectorHAController haController) {

  String checkpointDir = metricsConf.get(
    TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR, DEFAULT_CHECKPOINT_LOCATION);

  String checkpointLocation = FilenameUtils.concat(checkpointDir,
    CLUSTER_AGGREGATOR_MINUTE_CHECKPOINT_FILE);

  long sleepIntervalMillis = SECONDS.toMillis(
    metricsConf.getLong(CLUSTER_AGGREGATOR_MINUTE_SLEEP_INTERVAL, 300L));

  int checkpointCutOffMultiplier = metricsConf.getInt(
    CLUSTER_AGGREGATOR_MINUTE_CHECKPOINT_CUTOFF_MULTIPLIER, 2);

  String inputTableName = METRICS_CLUSTER_AGGREGATE_TABLE_NAME;
  String outputTableName = METRICS_CLUSTER_AGGREGATE_MINUTE_TABLE_NAME;
  String aggregatorDisabledParam = CLUSTER_AGGREGATOR_MINUTE_DISABLED;

  if (useGroupByAggregator(metricsConf)) {
    return new org.apache.ambari.metrics.core.timeline.aggregators.v2.TimelineMetricClusterAggregator(
      METRIC_AGGREGATE_MINUTE,
      hBaseAccessor, metricsConf,
      checkpointLocation,
      sleepIntervalMillis,
      checkpointCutOffMultiplier,
      aggregatorDisabledParam,
      inputTableName,
      outputTableName,
      120000L,
      haController
    );
  }

  return new TimelineMetricClusterAggregator(
    METRIC_AGGREGATE_MINUTE,
    metadataManager,
    hBaseAccessor, metricsConf,
    checkpointLocation,
    sleepIntervalMillis,
    checkpointCutOffMultiplier,
    aggregatorDisabledParam,
    inputTableName,
    outputTableName,
    120000L,
    haController
  );
}
 
Example 18
Source File: LocalFileGraphSaver.java    From deeplearning4j with Apache License 2.0
@Override
public void saveBestModel(ComputationGraph net, double score) throws IOException {
    String confOut = FilenameUtils.concat(directory, BEST_GRAPH_BIN);
    save(net, confOut);
}
 
Example 19
Source File: TestExport.java    From deeplearning4j with Apache License 2.0
@Test
public void testBatchAndExportMultiDataSetsFunction() throws Exception {
    String baseDir = System.getProperty("java.io.tmpdir");
    baseDir = FilenameUtils.concat(baseDir, "dl4j_spark_testBatchAndExportMDS/");
    baseDir = baseDir.replaceAll("\\\\", "/");
    File f = new File(baseDir);
    if (f.exists())
        FileUtils.deleteDirectory(f);
    f.mkdir();
    f.deleteOnExit();
    int minibatchSize = 5;
    int nIn = 4;
    int nOut = 3;

    List<MultiDataSet> dataSets = new ArrayList<>();
    dataSets.add(new org.nd4j.linalg.dataset.MultiDataSet(Nd4j.create(10, nIn), Nd4j.create(10, nOut))); //Larger than minibatch size -> tests splitting
    for (int i = 0; i < 98; i++) {
        if (i % 2 == 0) {
            dataSets.add(new org.nd4j.linalg.dataset.MultiDataSet(Nd4j.create(5, nIn), Nd4j.create(5, nOut)));
        } else {
            dataSets.add(new org.nd4j.linalg.dataset.MultiDataSet(Nd4j.create(1, nIn), Nd4j.create(1, nOut)));
            dataSets.add(new org.nd4j.linalg.dataset.MultiDataSet(Nd4j.create(1, nIn), Nd4j.create(1, nOut)));
            dataSets.add(new org.nd4j.linalg.dataset.MultiDataSet(Nd4j.create(3, nIn), Nd4j.create(3, nOut)));
        }
    }

    Collections.shuffle(dataSets, new Random(12345));

    JavaRDD<MultiDataSet> rdd = sc.parallelize(dataSets);
    rdd = rdd.repartition(1); //For testing purposes (should get exactly 100 out, but maybe more with more partitions)

    JavaRDD<String> pathsRdd = rdd.mapPartitionsWithIndex(
                    new BatchAndExportMultiDataSetsFunction(minibatchSize, "file:///" + baseDir), true);

    List<String> paths = pathsRdd.collect();
    assertEquals(100, paths.size());

    File[] files = f.listFiles();
    assertNotNull(files);

    int count = 0;
    for (File file : files) {
        if (!file.getPath().endsWith(".bin"))
            continue;
        //System.out.println(file);
        MultiDataSet ds = new org.nd4j.linalg.dataset.MultiDataSet();
        ds.load(file);
        assertEquals(minibatchSize, ds.getFeatures(0).size(0));
        assertEquals(minibatchSize, ds.getLabels(0).size(0));

        count++;
    }

    assertEquals(100, count);

    FileUtils.deleteDirectory(f);
}
 
Example 20
Source File: TimelineMetricAggregatorFactory.java    From ambari-metrics with Apache License 2.0
/**
 * Hourly aggregation for hosts.
 * Interval: 1 hour
 */
public static TimelineMetricAggregator createTimelineMetricAggregatorHourly(
    PhoenixHBaseAccessor hBaseAccessor, Configuration metricsConf,
    TimelineMetricMetadataManager metadataManager,
    MetricCollectorHAController haController) {

  String checkpointDir = metricsConf.get(
    TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR, DEFAULT_CHECKPOINT_LOCATION);
  String checkpointLocation = FilenameUtils.concat(checkpointDir,
    HOST_AGGREGATE_HOURLY_CHECKPOINT_FILE);
  long sleepIntervalMillis = SECONDS.toMillis(
    metricsConf.getLong(HOST_AGGREGATOR_HOUR_SLEEP_INTERVAL, 3600L));

  int checkpointCutOffMultiplier = metricsConf.getInt(
    HOST_AGGREGATOR_HOUR_CHECKPOINT_CUTOFF_MULTIPLIER, 2);
  String hostAggregatorDisabledParam = HOST_AGGREGATOR_HOUR_DISABLED;

  String inputTableName = METRICS_AGGREGATE_MINUTE_TABLE_NAME;
  String outputTableName = METRICS_AGGREGATE_HOURLY_TABLE_NAME;

  if (useGroupByAggregator(metricsConf)) {
    return new org.apache.ambari.metrics.core.timeline.aggregators.v2.TimelineMetricHostAggregator(
      METRIC_RECORD_HOURLY,
      hBaseAccessor, metricsConf,
      checkpointLocation,
      sleepIntervalMillis,
      checkpointCutOffMultiplier,
      hostAggregatorDisabledParam,
      inputTableName,
      outputTableName,
      3600000L,
      haController
    );
  }

  return new TimelineMetricHostAggregator(
    METRIC_RECORD_HOURLY,
    metadataManager,
    hBaseAccessor, metricsConf,
    checkpointLocation,
    sleepIntervalMillis,
    checkpointCutOffMultiplier,
    hostAggregatorDisabledParam,
    inputTableName,
    outputTableName,
    3600000L,
    haController);
}