Java Code Examples for org.nd4j.linalg.factory.Nd4j#hstack()

The following examples show how to use org.nd4j.linalg.factory.Nd4j#hstack(). Each example is taken from an open source project; the source file and license are noted above it.
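Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what hstack does: it concatenates its inputs horizontally, i.e. along the column dimension, so all inputs must have the same number of rows.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

import java.util.Arrays;

public class HstackSketch {
    public static void main(String[] args) {
        // Two matrices with the same number of rows (2) but different column counts
        INDArray a = Nd4j.ones(2, 3);   // 2x3 of ones
        INDArray b = Nd4j.zeros(2, 2);  // 2x2 of zeros

        // hstack joins them column-wise: the result is 2x5
        INDArray stacked = Nd4j.hstack(a, b);
        System.out.println(Arrays.toString(stacked.shape())); // [2, 5]

        // An INDArray[] also works, since the arrays are passed as varargs
        INDArray[] pieces = {Nd4j.ones(2, 1), Nd4j.zeros(2, 1), Nd4j.ones(2, 1)};
        System.out.println(Arrays.toString(Nd4j.hstack(pieces).shape())); // [2, 3]
    }
}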
Example 1
Source File: ElementWiseStrideTests.java    From nd4j with Apache License 2.0
@Test
public void testHstackConcatCols(){
    int rows = 10;
    INDArray[] arr = new INDArray[5];

    for( int i=0; i<arr.length; i++ ){
        arr[i] = Nd4j.linspace(i*rows,(i+1)*rows-1, rows).transpose();
    }

    INDArray expected = Nd4j.linspace(0,arr.length*rows-1, arr.length*rows).reshape('f',rows,arr.length);
    INDArray actual = Nd4j.hstack(arr);

    System.out.println(expected);
    System.out.println();
    System.out.println(actual);
    assertEquals(expected, actual);
}
 
Example 2
Source File: SumDataSource.java    From FederatedAndroidTrainer with MIT License
@Override
public FederatedDataSet getTrainingData() {
    Random rand = new Random(seed);
    double[] sum = new double[N_SAMPLES];
    double[] input1 = new double[N_SAMPLES];
    double[] input2 = new double[N_SAMPLES];
    for (int i = 0; i < N_SAMPLES; i++) {
        input1[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
        input2[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
        sum[i] = input1[i] + input2[i];
    }
    INDArray inputNDArray1 = Nd4j.create(input1, new int[]{N_SAMPLES, 1});
    INDArray inputNDArray2 = Nd4j.create(input2, new int[]{N_SAMPLES, 1});
    INDArray inputNDArray = Nd4j.hstack(inputNDArray1, inputNDArray2);
    INDArray outPut = Nd4j.create(sum, new int[]{N_SAMPLES, 1});
    DataSet dataSet = new DataSet(inputNDArray, outPut);
    dataSet.shuffle();
    return new FederatedDataSetImpl(dataSet);
}
 
Example 3
Source File: SumDataSource.java    From FederatedAndroidTrainer with MIT License
@Override
public FederatedDataSet getTestData() {
    Random rand = new Random(seed);
    int numSamples = N_SAMPLES/10;
    double[] sum = new double[numSamples];
    double[] input1 = new double[numSamples];
    double[] input2 = new double[numSamples];
    for (int i = 0; i < numSamples; i++) {
        input1[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
        input2[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
        sum[i] = input1[i] + input2[i];
    }
    INDArray inputNDArray1 = Nd4j.create(input1, new int[]{numSamples, 1});
    INDArray inputNDArray2 = Nd4j.create(input2, new int[]{numSamples, 1});
    INDArray inputNDArray = Nd4j.hstack(inputNDArray1, inputNDArray2);
    INDArray outPut = Nd4j.create(sum, new int[]{numSamples, 1});
    return new FederatedDataSetImpl(new DataSet(inputNDArray, outPut));
}
 
Example 4
Source File: ConcatTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testConcatHorizontally() {
    INDArray rowVector = Nd4j.ones(1, 5);
    INDArray other = Nd4j.ones(1, 5);
    INDArray concat = Nd4j.hstack(other, rowVector);
    assertEquals(rowVector.rows(), concat.rows());
    assertEquals(rowVector.columns() * 2, concat.columns());

}
 
Example 5
Source File: ConcatTests.java    From nd4j with Apache License 2.0
@Test
public void testConcatHorizontally() {
    INDArray rowVector = Nd4j.ones(5);
    INDArray other = Nd4j.ones(5);
    INDArray concat = Nd4j.hstack(other, rowVector);
    assertEquals(rowVector.rows(), concat.rows());
    assertEquals(rowVector.columns() * 2, concat.columns());

}
 
Example 6
Source File: ConcatTestsC.java    From nd4j with Apache License 2.0
@Test
public void testConcatRowVectors() {
    INDArray rowVector = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6}, new int[] {1, 6});
    INDArray matrix = Nd4j.create(new double[] {7, 8, 9, 10, 11, 12}, new int[] {1, 6});

    INDArray assertion1 = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, new int[] {1, 12});
    INDArray assertion0 = Nd4j.create(new double[][] {{1, 2, 3, 4, 5, 6}, {7, 8, 9, 10, 11, 12}});

    INDArray concat1 = Nd4j.hstack(rowVector, matrix);
    INDArray concat0 = Nd4j.vstack(rowVector, matrix);
    assertEquals(assertion1, concat1);
    assertEquals(assertion0, concat0);
}
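The hstack/vstack pair used above amounts to concatenation along dimension 1 and dimension 0 respectively. Below is a short sketch of the same check written with Nd4j.concat directly; the equivalence is an assumption based on the general concat API, not something the test above asserts.

@Test
public void testConcatViaConcatDimension() {
    INDArray rowVector = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6}, new int[] {1, 6});
    INDArray matrix = Nd4j.create(new double[] {7, 8, 9, 10, 11, 12}, new int[] {1, 6});

    // concat along dimension 1 mirrors hstack; along dimension 0 it mirrors vstack
    INDArray alongColumns = Nd4j.concat(1, rowVector, matrix); // shape [1, 12]
    INDArray alongRows = Nd4j.concat(0, rowVector, matrix);    // shape [2, 6]

    assertEquals(Nd4j.hstack(rowVector, matrix), alongColumns);
    assertEquals(Nd4j.vstack(rowVector, matrix), alongRows);
}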
 
Example 7
Source File: CoverageModelEMWorkspace.java    From gatk-protected with BSD 3-Clause "New" or "Revised" License
/**
 * Saves read depth posteriors to disk
 *
 * @param outputPath the output path
 */
protected void writeReadDepthPosteriors(final String outputPath) {
    logger.info("Saving read depth posteriors...");
    final List<String> sampleNames = processedReadCounts.columnNames();
    final INDArray combinedReadDepthPosteriors = Nd4j.hstack(sampleMeanLogReadDepths, sampleVarLogReadDepths);
    final File sampleReadDepthPosteriorsFile = new File(outputPath, CoverageModelGlobalConstants.SAMPLE_READ_DEPTH_POSTERIORS_FILENAME);
    Nd4jIOUtils.writeNDArrayMatrixToTextFile(combinedReadDepthPosteriors, sampleReadDepthPosteriorsFile,
            "SAMPLE_NAME", sampleNames, Arrays.asList("READ_DEPTH_MEAN", "READ_DEPTH_VAR"));
}
 
Example 8
Source File: Pruning.java    From ml-models with Apache License 2.0
public Embedding prune(Embedding prevEmbedding, Embedding embedding) {

        INDArray embeddingToPrune = Nd4j.hstack(prevEmbedding.getNDEmbedding(), embedding.getNDEmbedding());
        Feature[] featuresToPrune = ArrayUtils.addAll(prevEmbedding.getFeatures(), embedding.getFeatures());


        progressLogger.log("Feature Pruning: Creating features graph");
        final Graph graph = loadFeaturesGraph(embeddingToPrune, prevEmbedding.features.length);
        progressLogger.log("Feature Pruning: Created features graph");

        progressLogger.log("Feature Pruning: Finding features to keep");
        int[] featureIdsToKeep = findConnectedComponents(graph)
                .collect(Collectors.groupingBy(item -> item.setId))
                .values()
                .stream()
                .mapToInt(results -> results.stream().mapToInt(value -> (int) value.nodeId).min().getAsInt())
                .toArray();
        progressLogger.log("Feature Pruning: Found features to keep");

        progressLogger.log("Feature Pruning: Pruning embeddings");
        INDArray prunedNDEmbedding = pruneEmbedding(embeddingToPrune, featureIdsToKeep);
        progressLogger.log("Feature Pruning: Pruned embeddings");


        Feature[] prunedFeatures = new Feature[featureIdsToKeep.length];

        for (int index = 0; index < featureIdsToKeep.length; index++) {
            prunedFeatures[index] = featuresToPrune[featureIdsToKeep[index]];
        }


        return new Embedding(prunedFeatures, prunedNDEmbedding);
    }
 
Example 9
Source File: BaseAggregation.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray getAccumulatedResult() {

    if (aggregationWidth == 1) {
        return chunks.get((short) 0);
    } else
        return Nd4j.hstack(chunks.values());
}
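As this example shows, hstack also accepts a Collection<INDArray>, which is handy when the number of pieces is only known at runtime. A minimal sketch of that overload (the class name and values are made up for illustration):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class HstackCollectionSketch {
    public static void main(String[] args) {
        // Build a variable number of 4x2 chunks
        List<INDArray> chunks = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            chunks.add(Nd4j.ones(4, 2).mul(i));
        }

        // Join them column-wise in a single call: the result is 4x6
        INDArray joined = Nd4j.hstack(chunks);
        System.out.println(Arrays.toString(joined.shape())); // [4, 6]
    }
}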
 
Example 10
Source File: FrozenLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testFrozen() {
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .activation(Activation.IDENTITY);

    FineTuneConfiguration finetune = new FineTuneConfiguration.Builder().updater(new Sgd(0.1)).build();

    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).build())
                    .layer(2, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build());

    modelToFineTune.init();
    List<INDArray> ff = modelToFineTune.feedForwardToLayer(2, randomData.getFeatures(), false);
    INDArray asFrozenFeatures = ff.get(2);

    MultiLayerNetwork modelNow = new TransferLearning.Builder(modelToFineTune).fineTuneConfiguration(finetune)
                    .setFeatureExtractor(1).build();

    INDArray paramsLastTwoLayers =
                    Nd4j.hstack(modelToFineTune.getLayer(2).params(), modelToFineTune.getLayer(3).params());
    MultiLayerNetwork notFrozen = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build(), paramsLastTwoLayers);

    //        assertEquals(modelNow.getLayer(2).conf(), notFrozen.getLayer(0).conf());  //Equal, other than names
    //        assertEquals(modelNow.getLayer(3).conf(), notFrozen.getLayer(1).conf());  //Equal, other than names

    //Check: forward pass
    INDArray outNow = modelNow.output(randomData.getFeatures());
    INDArray outNotFrozen = notFrozen.output(asFrozenFeatures);
    assertEquals(outNow, outNotFrozen);

    for (int i = 0; i < 5; i++) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        modelNow.fit(randomData);
    }

    INDArray expected = Nd4j.hstack(modelToFineTune.getLayer(0).params(), modelToFineTune.getLayer(1).params(),
                    notFrozen.params());
    INDArray act = modelNow.params();
    assertEquals(expected, act);
}
 
Example 11
Source File: TransferLearningHelperTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testMLN() {
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .activation(Activation.IDENTITY);

    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).build())
                    .layer(2, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build());

    modelToFineTune.init();
    MultiLayerNetwork modelNow = new TransferLearning.Builder(modelToFineTune).setFeatureExtractor(1).build();
    List<INDArray> ff = modelToFineTune.feedForwardToLayer(2, randomData.getFeatures(), false);
    INDArray asFrozenFeatures = ff.get(2);

    TransferLearningHelper helper = new TransferLearningHelper(modelToFineTune, 1);

    INDArray paramsLastTwoLayers =
                    Nd4j.hstack(modelToFineTune.getLayer(2).params(), modelToFineTune.getLayer(3).params());
    MultiLayerNetwork notFrozen = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build(), paramsLastTwoLayers);

    assertEquals(asFrozenFeatures, helper.featurize(randomData).getFeatures());
    assertEquals(randomData.getLabels(), helper.featurize(randomData).getLabels());

    for (int i = 0; i < 5; i++) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        helper.fitFeaturized(helper.featurize(randomData));
        modelNow.fit(randomData);
    }

    INDArray expected = Nd4j.hstack(modelToFineTune.getLayer(0).params(), modelToFineTune.getLayer(1).params(),
                    notFrozen.params());
    INDArray act = modelNow.params();
    assertEquals(expected, act);
}
 
Example 12
Source File: DeepGL.java    From ml-models with Apache License 2.0
@Override
public void run() {
    for (; ; ) {
        final int nodeId = nodeQueue.getAndIncrement();
        if (nodeId >= nodeCount || !running()) {
            return;
        }

        List<Integer> bothNeighbours = new LinkedList<>();
        List<Integer> inNeighbours = new LinkedList<>();
        List<Integer> outNeighbours = new LinkedList<>();
        final List<List<Integer>> neighbourhoods = Arrays.asList(outNeighbours, inNeighbours, bothNeighbours);

        graph.forEachRelationship(nodeId, Direction.BOTH, (sourceNodeId, targetNodeId, relationId) -> {
            bothNeighbours.add(targetNodeId);
            if (graph.exists(sourceNodeId, targetNodeId, Direction.OUTGOING)) {
                outNeighbours.add(targetNodeId);
            } else {
                inNeighbours.add(targetNodeId);
            }
            return true;
        });

        List<INDArray> arrays = new ArrayList<>();
        for (List<Integer> neighbourhood : neighbourhoods) {
            if (neighbourhood.isEmpty()) {
                arrays.add(Nd4j.zeros(operators.length * prevEmbedding.columns()));
            } else {
                final INDArray neighbourhoodFeatures = prevEmbedding.getRows(ArrayUtils.toPrimitive(neighbourhood.toArray(new Integer[0])));
                for (RelOperator operator : operators) {
                    final INDArray opResult = operator.op(neighbourhoodFeatures, prevEmbedding.getRow(nodeId));
                    arrays.add(opResult);
                }
            }
        }

        final INDArray nodeFeatures = Nd4j.hstack(arrays);
        embedding.putRow(nodeId, nodeFeatures);


    }
}
 
Example 13
Source File: TransferLearningMLNTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testAllWithCNNNew() {
    Nd4j.getRandom().setSeed(12345);

    DataSet randomData = new DataSet(Nd4j.rand(DataType.FLOAT,10, 28 * 28 * 3).reshape(10, 3, 28, 28), TestUtils.randomOneHot(10, 10));
    MultiLayerNetwork modelToFineTune =
            new MultiLayerNetwork(
                    new NeuralNetConfiguration.Builder().seed(123)
                            .weightInit(WeightInit.XAVIER)
                            .updater(new Nesterovs(0.01, 0.9))
                            .list()
                            .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(3).stride(1, 1)
                                    .nOut(20).activation(Activation.IDENTITY).build())
                            .layer(1, new SubsamplingLayer.Builder(PoolingType.MAX)
                                    .kernelSize(2, 2).stride(2, 2).build())
                            .layer(2, new ConvolutionLayer.Builder(5, 5).stride(1, 1)
                                    .nOut(50).activation(Activation.IDENTITY).build())
                            .layer(3, new SubsamplingLayer.Builder(PoolingType.MAX)
                                    .kernelSize(2, 2).stride(2, 2).build())
                            .layer(4, new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build())
                            .layer(5, new DenseLayer.Builder().activation(Activation.RELU).nOut(250).build())
                            .layer(6, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                    .nOut(100).activation(Activation.SOFTMAX).build())
                            .setInputType(InputType.convolutionalFlat(28, 28, 3)) //See note below
                            .build());
    modelToFineTune.init();
    INDArray asFrozenFeatures = modelToFineTune.feedForwardToLayer(2, randomData.getFeatures(), false).get(2); //10x20x12x12

    NeuralNetConfiguration.Builder equivalentConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.2));
    FineTuneConfiguration overallConf = new FineTuneConfiguration.Builder().updater(new Sgd(0.2)).build();

    MultiLayerNetwork modelNow = new TransferLearning.Builder(modelToFineTune).fineTuneConfiguration(overallConf)
                    .setFeatureExtractor(1).removeLayersFromOutput(5)
                    .addLayer(new DenseLayer.Builder().activation(Activation.RELU).nIn(12 * 12 * 20).nOut(300)
                                    .build())
                    .addLayer(new DenseLayer.Builder().activation(Activation.RELU).nIn(300).nOut(150).build())
                    .addLayer(new DenseLayer.Builder().activation(Activation.RELU).nIn(150).nOut(50).build())
                    .addLayer(new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                    .activation(Activation.SOFTMAX).nIn(50).nOut(10).build())
                    .setInputPreProcessor(2, new CnnToFeedForwardPreProcessor(12, 12, 20)).build();


    MultiLayerNetwork notFrozen = new MultiLayerNetwork(equivalentConf.list()
                    .layer(0, new DenseLayer.Builder().activation(Activation.RELU).nIn(12 * 12 * 20).nOut(300)
                                    .build())
                    .layer(1, new DenseLayer.Builder().activation(Activation.RELU).nIn(300).nOut(150).build())
                    .layer(2, new DenseLayer.Builder().activation(Activation.RELU).nIn(150).nOut(50).build())
                    .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nIn(50)
                                    .nOut(10).activation(Activation.SOFTMAX).build())
                    .inputPreProcessor(0, new CnnToFeedForwardPreProcessor(12, 12, 20))
                    .build());
    notFrozen.init();

    assertArrayEquals(modelToFineTune.getLayer(0).params().shape(), modelNow.getLayer(0).params().shape());
    //subsampling has no params
    //assertArrayEquals(modelExpectedArch.getLayer(1).params().shape(), modelNow.getLayer(1).params().shape());
    assertArrayEquals(notFrozen.getLayer(0).params().shape(), modelNow.getLayer(2).params().shape());
    modelNow.getLayer(2).setParams(notFrozen.getLayer(0).params());
    assertArrayEquals(notFrozen.getLayer(1).params().shape(), modelNow.getLayer(3).params().shape());
    modelNow.getLayer(3).setParams(notFrozen.getLayer(1).params());
    assertArrayEquals(notFrozen.getLayer(2).params().shape(), modelNow.getLayer(4).params().shape());
    modelNow.getLayer(4).setParams(notFrozen.getLayer(2).params());
    assertArrayEquals(notFrozen.getLayer(3).params().shape(), modelNow.getLayer(5).params().shape());
    modelNow.getLayer(5).setParams(notFrozen.getLayer(3).params());

    int i = 0;
    while (i < 3) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        modelNow.fit(randomData);
        i++;
    }

    INDArray expectedParams = Nd4j.hstack(modelToFineTune.getLayer(0).params(), notFrozen.params());
    assertEquals(expectedParams, modelNow.params());
}
 
Example 14
Source File: MtcnnUtil.java    From mtcnn-java with Apache License 2.0
/**
 * Use heatmap to generate bounding boxes.
 *
 * original code:
 *  - https://github.com/kpzhang93/MTCNN_face_detection_alignment/blob/master/code/codes/MTCNNv2/generateBoundingBox.m
 *  - https://github.com/davidsandberg/facenet/blob/master/src/align/detect_face.py#L660
 *
 * @param imap the classification score heatmap
 * @param reg the bounding box regression map
 * @param scale the current image pyramid scale
 * @param stepThreshold the score threshold for keeping candidate boxes
 * @return the generated bounding boxes and the regression values
 */
public static INDArray[] generateBoundingBox(INDArray imap, INDArray reg, double scale, double stepThreshold) {

	int stride = 2;
	int cellSize = 12;

	// imap = np.transpose(imap)
	// y, x = np.where(imap >= t)
	// imap = imap.transpose();
	INDArray bb = MtcnnUtil.getIndexWhereMatrix(imap, v -> v >= stepThreshold);
	//INDArray bb = MtcnnUtil.getIndexWhere3(imap, Conditions.greaterThanOrEqual(stepThreshold));

	if (bb.isEmpty()) {
		return new INDArray[] { Nd4j.empty(), Nd4j.empty() };
	}

	INDArray yx = bb.transpose();

	// TODO : implement the following code fragment
	//  if y.shape[0] == 1:
	//    dx1 = np.flipud(dx1)
	//    dy1 = np.flipud(dy1)
	//    dx2 = np.flipud(dx2)
	//    dy2 = np.flipud(dy2)
	if (yx.size(0) == 1) {
		throw new IllegalStateException("TODO");
	}

	//    q1 = np.fix((stride*bb+1)/scale)
	//    q2 = np.fix((stride*bb+cellsize-1+1)/scale)
	INDArray q1 = Transforms.floor(bb.mul(stride).add(1).div(scale));
	INDArray q2 = Transforms.floor(bb.mul(stride).add(cellSize).div(scale));

	//    dx1 = np.transpose(reg[:,:,0])
	//    dy1 = np.transpose(reg[:,:,1])
	//    dx2 = np.transpose(reg[:,:,2])
	//    dy2 = np.transpose(reg[:,:,3])
	INDArray dx1 = reg.get(all(), all(), point(0));
	INDArray dy1 = reg.get(all(), all(), point(1));
	INDArray dx2 = reg.get(all(), all(), point(2));
	INDArray dy2 = reg.get(all(), all(), point(3));

	// reg = np.transpose(np.vstack([ dx1[(y,x)], dy1[(y,x)], dx2[(y,x)], dy2[(y,x)] ]))
	INDArray outReg = Nd4j.vstack(dx1.get(yx), dy1.get(yx), dx2.get(yx), dy2.get(yx)).transpose();

	//  if reg.size == 0:
	//    reg = np.empty(shape=(0, 3))
	if (outReg.isEmpty()) {
		outReg = Nd4j.empty();
	}

	INDArray score = imap.get(yx).transpose();

	INDArray boundingBox = Nd4j.hstack(q1, q2, score, outReg);

	return new INDArray[] { boundingBox, outReg };
}
 
Example 15
Source File: FrozenLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void cloneMLNFrozen() {

    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .activation(Activation.IDENTITY);
    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(overallConf.list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).build())
                    .layer(2, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                    .build())
                    .build());

    modelToFineTune.init();
    INDArray asFrozenFeatures = modelToFineTune.feedForwardToLayer(2, randomData.getFeatures(), false).get(2);
    MultiLayerNetwork modelNow = new TransferLearning.Builder(modelToFineTune).setFeatureExtractor(1).build();

    MultiLayerNetwork clonedModel = modelNow.clone();

    //Check json
    assertEquals(modelNow.getLayerWiseConfigurations().toJson(), clonedModel.getLayerWiseConfigurations().toJson());

    //Check params
    assertEquals(modelNow.params(), clonedModel.params());

    MultiLayerNetwork notFrozen = new MultiLayerNetwork(
                    overallConf.list().layer(0, new DenseLayer.Builder().nIn(2).nOut(3).build())
                                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.MCXENT)
                                                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                                    .build())
                                    .build(),
                    Nd4j.hstack(modelToFineTune.getLayer(2).params(), modelToFineTune.getLayer(3).params()));

    int i = 0;
    while (i < 5) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        modelNow.fit(randomData);
        clonedModel.fit(randomData);
        i++;
    }

    INDArray expectedParams = Nd4j.hstack(modelToFineTune.getLayer(0).params(),
                    modelToFineTune.getLayer(1).params(), notFrozen.params());
    assertEquals(expectedParams, modelNow.params());
    assertEquals(expectedParams, clonedModel.params());

}
 
Example 16
Source File: FrozenLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void cloneCompGraphFrozen() {

    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .activation(Activation.IDENTITY);

    ComputationGraph modelToFineTune = new ComputationGraph(overallConf.graphBuilder().addInputs("layer0In")
                    .addLayer("layer0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "layer0In")
                    .addLayer("layer1", new DenseLayer.Builder().nIn(3).nOut(2).build(), "layer0")
                    .addLayer("layer2", new DenseLayer.Builder().nIn(2).nOut(3).build(), "layer1")
                    .addLayer("layer3",
                                    new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.MCXENT)
                                                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                                    .build(),
                                    "layer2")
                    .setOutputs("layer3").build());

    modelToFineTune.init();
    INDArray asFrozenFeatures = modelToFineTune.feedForward(randomData.getFeatures(), false).get("layer1");
    ComputationGraph modelNow =
                    new TransferLearning.GraphBuilder(modelToFineTune).setFeatureExtractor("layer1").build();

    ComputationGraph clonedModel = modelNow.clone();

    //Check json
    assertEquals(clonedModel.getConfiguration().toJson(), modelNow.getConfiguration().toJson());

    //Check params
    assertEquals(modelNow.params(), clonedModel.params());

    ComputationGraph notFrozen = new ComputationGraph(overallConf.graphBuilder().addInputs("layer0In")
                    .addLayer("layer0", new DenseLayer.Builder().nIn(2).nOut(3).build(), "layer0In")
                    .addLayer("layer1",
                                    new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.MCXENT)
                                                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                                    .build(),
                                    "layer0")
                    .setOutputs("layer1").build());
    notFrozen.init();
    notFrozen.setParams(Nd4j.hstack(modelToFineTune.getLayer("layer2").params(),
                    modelToFineTune.getLayer("layer3").params()));


    int i = 0;
    while (i < 5) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        modelNow.fit(randomData);
        clonedModel.fit(randomData);
        i++;
    }

    INDArray expectedParams = Nd4j.hstack(modelToFineTune.getLayer("layer0").params(),
                    modelToFineTune.getLayer("layer1").params(), notFrozen.params());
    assertEquals(expectedParams, modelNow.params());
    assertEquals(expectedParams, clonedModel.params());
}
 
Example 17
Source File: WindowConverter.java    From deeplearning4j with Apache License 2.0
/**
 * Converts a window (each word in the window) into a vector.
 *
 * Keep in mind each window is a multi-word context.
 *
 * Each word in the window is looked up in the passed-in model,
 * which serves as a lookup table for the relevant word vectors.
 *
 * @param window the window to take in
 * @param vec the model to use as a lookup table
 * @return a concatenated 1-row array containing the vector values
 * for each word in the window
 */
public static INDArray asExampleMatrix(Window window, Word2Vec vec) {
    INDArray[] data = new INDArray[window.getWords().size()];
    for (int i = 0; i < data.length; i++) {
        data[i] = vec.getWordVectorMatrix(window.getWord(i));

        // if there's null elements
        if (data[i] == null)
            data[i] = Nd4j.zeros(1, vec.getLayerSize());
    }
    return Nd4j.hstack(data);
}
 
Example 18
Source File: PLNetDyadRanker.java    From AILibs with GNU Affero General Public License v3.0
/**
 * Converts a dyad to a {@link INDArray} row vector consisting of a
 * concatenation of the instance and alternative features.
 *
 * @param dyad
 *            The dyad to convert.
 * @return The dyad in {@link INDArray} row vector form.
 */
private INDArray dyadToVector(final IDyad dyad) {
	INDArray instanceOfDyad = Nd4j.create(dyad.getContext().asArray());
	INDArray alternativeOfDyad = Nd4j.create(dyad.getAlternative().asArray());
	return Nd4j.hstack(instanceOfDyad, alternativeOfDyad);
}
 
Example 19
Source File: CrashTest.java    From nd4j with Apache License 2.0
protected void op(INDArray x, INDArray y, int i) {
    // broadcast along row & column
    INDArray row = Nd4j.ones(64);
    INDArray column = Nd4j.ones(1024, 1);

    x.addiRowVector(row);
    x.addiColumnVector(column);

    // casual scalar
    x.addi(i * 2);

    // reduction along all dimensions
    float sum = x.sumNumber().floatValue();

    // index reduction
    Nd4j.getExecutioner().exec(new IMax(x), Integer.MAX_VALUE);

    // casual transform
    Nd4j.getExecutioner().exec(new Sqrt(x, x));

    //  dup
    INDArray x1 = x.dup(x.ordering());
    INDArray x2 = x.dup(x.ordering());
    INDArray x3 = x.dup('c');
    INDArray x4 = x.dup('f');


    // vstack && hstack
    INDArray vstack = Nd4j.vstack(x, x1, x2, x3, x4);

    INDArray hstack = Nd4j.hstack(x, x1, x2, x3, x4);

    // reduce3 call
    Nd4j.getExecutioner().exec(new ManhattanDistance(x, x2));


    // flatten call
    INDArray flat = Nd4j.toFlattened(x, x1, x2, x3, x4);


    // reduction along dimension: row & column
    INDArray max_0 = x.max(0);
    INDArray max_1 = x.max(1);


    // index reduction along dimension: row & column
    INDArray imax_0 = Nd4j.argMax(x, 0);
    INDArray imax_1 = Nd4j.argMax(x, 1);


    // log softmax, softmax & softmax derivative
    Nd4j.getExecutioner().exec(new OldSoftMax(x));
    Nd4j.getExecutioner().exec(new SoftMaxDerivative(x));
    Nd4j.getExecutioner().exec(new LogSoftMax(x));


    // BooleanIndexing
    BooleanIndexing.replaceWhere(x, 5f, Conditions.lessThan(8f));

    // assign on view
    BooleanIndexing.assignIf(x, x1, Conditions.greaterThan(-1000000000f));

    // std var along all dimensions
    float std = x.stdNumber().floatValue();

    // std var along row & col
    INDArray xStd_0 = x.std(0);
    INDArray xStd_1 = x.std(1);

    // blas call
    float dot = (float) Nd4j.getBlasWrapper().dot(x, x1);

    // mmul
    for (boolean tA : paramsA) {
        for (boolean tB : paramsB) {

            INDArray xT = tA ? x.dup() : x.dup().transpose();
            INDArray yT = tB ? y.dup() : y.dup().transpose();

            Nd4j.gemm(xT, yT, tA, tB);
        }
    }

    // specially for views, checking here without dup and rollover
    Nd4j.gemm(x, y, false, false);

    log.debug("Iteration passed: " + i);
}
 
Example 20
Source File: CrashTest.java    From deeplearning4j with Apache License 2.0
protected void op(INDArray x, INDArray y, int i) {
    // broadcast along row & column
    INDArray row = Nd4j.ones(64);
    INDArray column = Nd4j.ones(1024, 1);

    x.addiRowVector(row);
    x.addiColumnVector(column);

    // casual scalar
    x.addi(i * 2);

    // reduction along all dimensions
    float sum = x.sumNumber().floatValue();

    // index reduction
    Nd4j.getExecutioner().exec(new ArgMax(x));

    // casual transform
    Nd4j.getExecutioner().exec(new Sqrt(x, x));

    //  dup
    INDArray x1 = x.dup(x.ordering());
    INDArray x2 = x.dup(x.ordering());
    INDArray x3 = x.dup('c');
    INDArray x4 = x.dup('f');


    // vstack && hstack
    INDArray vstack = Nd4j.vstack(x, x1, x2, x3, x4);

    INDArray hstack = Nd4j.hstack(x, x1, x2, x3, x4);

    // reduce3 call
    Nd4j.getExecutioner().exec(new ManhattanDistance(x, x2));


    // flatten call
    INDArray flat = Nd4j.toFlattened(x, x1, x2, x3, x4);


    // reduction along dimension: row & column
    INDArray max_0 = x.max(0);
    INDArray max_1 = x.max(1);


    // index reduction along dimension: row & column
    INDArray imax_0 = Nd4j.argMax(x, 0);
    INDArray imax_1 = Nd4j.argMax(x, 1);


    // softmax & log softmax
    Nd4j.getExecutioner().exec((CustomOp) new SoftMax(x));
    Nd4j.getExecutioner().exec((CustomOp) new LogSoftMax(x));


    // BooleanIndexing
    BooleanIndexing.replaceWhere(x, 5f, Conditions.lessThan(8f));

    // assign on view
    BooleanIndexing.assignIf(x, x1, Conditions.greaterThan(-1000000000f));

    // std var along all dimensions
    float std = x.stdNumber().floatValue();

    // std var along row & col
    INDArray xStd_0 = x.std(0);
    INDArray xStd_1 = x.std(1);

    // blas call
    float dot = (float) Nd4j.getBlasWrapper().dot(x, x1);

    // mmul
    for (boolean tA : paramsA) {
        for (boolean tB : paramsB) {

            INDArray xT = tA ? x.dup() : x.dup().transpose();
            INDArray yT = tB ? y.dup() : y.dup().transpose();

            Nd4j.gemm(xT, yT, tA, tB);
        }
    }

    // specially for views, checking here without dup and rollover
    Nd4j.gemm(x, y, false, false);

    log.debug("Iteration passed: " + i);
}