Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray

The following are top-voted examples showing how to use org.nd4j.linalg.api.ndarray.INDArray. These examples are extracted from open source projects. You can vote up the examples you like; your votes are used by our system to surface better examples.
Example 1
Project: anagnostes   File: NumbersDataFetcher.java   View source code 7 votes vote down vote up
@Override
public void fetch(int numExamples) {
	// One row per example: features in MNIST pixel format, labels one-hot.
	float[][] featureData = new float[numExamples][0];
	float[][] labelData = new float[numExamples][0];

	int examplesRead = 0;

	for (; examplesRead < numExamples; examplesRead++) {
		// Stop early when the dataset is exhausted.
		if (cursor + examplesRead >= m_allFileNames.size()) {
			break;
		}
		Entry<String, String> entry = m_allFileNames.get(cursor + examplesRead);

		featureData[examplesRead] = imageFileNameToMnsitFormat(entry.getValue());
		labelData[examplesRead] = toLabelArray(entry.getKey());
	}
	cursor += examplesRead;

	// BUGFIX: when we broke out early, the tail rows are still zero-length,
	// which would hand Nd4j.create a jagged matrix. Trim to the rows filled.
	if (examplesRead < numExamples) {
		featureData = java.util.Arrays.copyOf(featureData, examplesRead);
		labelData = java.util.Arrays.copyOf(labelData, examplesRead);
	}

	INDArray features = Nd4j.create(featureData);
	INDArray labels = Nd4j.create(labelData);
	curr = new DataSet(features, labels);
}
 
Example 2
Project: ijcnlp2017-cmaps   File: WordEmbeddingDistance.java   View source code 6 votes vote down vote up
public INDArray getConceptVector(Concept c) {

		// Tokenize the normalized concept name and look up each token's vector,
		// substituting the shared unknown-word vector where necessary.
		Tokenizer tokenizer = SimpleTokenizer.INSTANCE;

		List<INDArray> tokenVectors = new ArrayList<INDArray>();
		int unknownCount = 0;
		for (String token : tokenizer.tokenize(c.name.toLowerCase().trim())) {
			if (!wordVectors.hasWord(token)) {
				tokenVectors.add(unkVector);
				unknownCount++;
			} else {
				tokenVectors.add(wordVectors.getWordVectorMatrix(token));
			}
		}
		// Every token unknown: no meaningful vector can be produced.
		if (unknownCount == tokenVectors.size())
			return null; // all tokens unknown

		// Average the stacked token vectors; sum vs. mean is irrelevant for
		// cosine similarity.
		return Nd4j.vstack(tokenVectors).mean(0);
	}
 
Example 3
Project: par2hier   File: Par2HierUtils.java   View source code 6 votes vote down vote up
/**
 * transforms paragraph vectors into hierarchical vectors
 * @param iterator iterator over docs
 * @param lookupTable the paragraph vector table
 * @param labels the labels
 * @param k the no. of centroids
 * @param method the aggregation method to apply per document
 * @return a map doc->hierarchical vector
 */
static Map<String, INDArray> getPar2Hier(LabelAwareIterator iterator,
                                         WeightLookupTable<VocabWord> lookupTable,
                                         List<String> labels, int k, Method method) {
  Collections.sort(labels);

  // Index every label in a trie so hierarchy lookups can work by prefix.
  PatriciaTrie<String> labelTrie = new PatriciaTrie<>();
  for (String label : labels) {
    labelTrie.put(label, label);
  }

  LabelsSource labelsSource = iterator.getLabelsSource();
  Map<String, INDArray> hierarchicalVectors = new TreeMap<>();
  // Compute one hierarchical vector per document label.
  for (String doc : labelsSource.getLabels()) {
    Par2HierUtils.getPar2HierVector(lookupTable, labelTrie, doc, k, hierarchicalVectors, method);
  }
  return hierarchicalVectors;
}
 
Example 4
Project: ijcnlp2017-cmaps   File: WordEmbeddingDistance.java   View source code 6 votes vote down vote up
@Override
public double computeSimilarity(Concept c1, Concept c2) {
	// Same name (ignoring case) is trivially maximal similarity.
	if (c2.name.toLowerCase().equals(c1.name.toLowerCase()))
		return 1;

	// Lazily load the embeddings on first use.
	if (wordVectors == null) {
		this.loadWordVectors(type, dimension);
		int[] shape = wordVectors.lookupTable().getWeights().shape();
		System.out.println("word embeddings loaded, " + shape[0] + " " + shape[1]);
	}

	INDArray vec1 = this.getConceptVector(c1);
	INDArray vec2 = this.getConceptVector(c2);
	// A null vector means every token of that concept was unknown.
	if (vec2 == null || vec1 == null)
		return Double.NaN;

	double similarity = Transforms.cosineSim(vec1, vec2);

	if (Double.isNaN(similarity))
		System.err.println("Embedding NaN");

	return similarity;
}
 
Example 5
Project: Word2VecfJava   File: WordVectorSerializer.java   View source code 6 votes vote down vote up
/**
 * Parses a text-format word-vector file: the header line is
 * "&lt;vocabSize&gt; &lt;layerSize&gt;", then one word plus its vector per line.
 *
 * @param wordFilePath path of the text vector file
 * @return the vocabulary (in file order) paired with a vocabSize x layerSize matrix
 * @throws IOException if the file cannot be read
 */
private static Pair<List<String>, INDArray> fromText(String wordFilePath) throws IOException {
	// BUGFIX: the reader was never closed; try-with-resources releases the
	// file handle even when parsing throws.
	try (BufferedReader reader = new BufferedReader(
			Common.asReaderUTF8Lenient(new FileInputStream(new File(wordFilePath))))) {
		String fstLine = reader.readLine();
		String[] header = fstLine.split(" ");
		int vocabSize = Integer.parseInt(header[0]);
		int layerSize = Integer.parseInt(header[1]);
		List<String> wordVocab = Lists.newArrayList();
		INDArray wordVectors = Nd4j.create(vocabSize, layerSize);
		int n = 1; // 1-based data-line counter; row n-1 receives the vector
		String line;
		while ((line = reader.readLine()) != null) {
			String[] values = line.split(" ");
			wordVocab.add(values[0]);
			Preconditions.checkArgument(layerSize == values.length - 1,
					"For file '%s', on line %s, layer size is %s, but found %s values in the word vector",
					wordFilePath, n, layerSize, values.length - 1); // Sanity check
			for (int d = 1; d < values.length; d++) wordVectors.putScalar(n - 1, d - 1, Float.parseFloat(values[d]));
			n++;
		}
		return new Pair<>(wordVocab, wordVectors);
	}
}
 
Example 6
Project: NeuralNetworksLite   File: SentimentExampleIterator.java   View source code 6 votes vote down vote up
/**
 * Used post training to convert a String to a features INDArray that can be passed to the network output method
 *
 * @param reviewContents Contents of the review to vectorize
 * @param maxLength Maximum length (if review is longer than this: truncate to maxLength). Use Integer.MAX_VALUE to not truncate
 * @return Features array for the given input String
 */
public INDArray loadFeaturesFromString(String reviewContents, int maxLength){
	List<String> tokens = tokenizerFactory.create(reviewContents).getTokens();
	// Keep only tokens the embedding model knows; unknown words have no vector.
	List<String> tokensFiltered = new ArrayList<>();
	for(String t : tokens ){
		if(wordVectors.hasWord(t)) tokensFiltered.add(t);
	}
	// BUGFIX: was Math.max, which defeats truncation and tries to allocate a
	// maxLength-sized array (catastrophic for Integer.MAX_VALUE).
	int outputLength = Math.min(maxLength, tokensFiltered.size());

	INDArray features = Nd4j.create(1, vectorSize, outputLength);

	// BUGFIX: iterate the filtered list — iterating the raw tokens could fetch
	// a null vector for an unknown word and NPE inside features.put(), and
	// could index past outputLength.
	for( int j=0; j<tokensFiltered.size() && j<maxLength; j++ ){
		String token = tokensFiltered.get(j);
		INDArray vector = wordVectors.getWordVectorMatrix(token);
		features.put(new INDArrayIndex[]{NDArrayIndex.point(0), NDArrayIndex.all(), NDArrayIndex.point(j)}, vector);
	}

	return features;
}
 
Example 7
Project: FederatedAndroidTrainer   File: SumDataSource.java   View source code 6 votes vote down vote up
@Override
public FederatedDataSet getTrainingData() {
    // Generate N_SAMPLES pairs of uniform random addends and their sums.
    Random random = new Random(seed);
    double[] targets = new double[N_SAMPLES];
    double[] addendA = new double[N_SAMPLES];
    double[] addendB = new double[N_SAMPLES];
    for (int i = 0; i < N_SAMPLES; i++) {
        addendA[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * random.nextDouble();
        addendB[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * random.nextDouble();
        targets[i] = addendA[i] + addendB[i];
    }
    // Two input columns (one per addend) beside one output column (the sum).
    INDArray columnA = Nd4j.create(addendA, new int[]{N_SAMPLES, 1});
    INDArray columnB = Nd4j.create(addendB, new int[]{N_SAMPLES, 1});
    INDArray inputs = Nd4j.hstack(columnA, columnB);
    INDArray outputs = Nd4j.create(targets, new int[]{N_SAMPLES, 1});
    DataSet dataSet = new DataSet(inputs, outputs);
    dataSet.shuffle();
    return new FederatedDataSetImpl(dataSet);
}
 
Example 8
Project: FederatedAndroidTrainer   File: SumDataSource.java   View source code 6 votes vote down vote up
@Override
public FederatedDataSet getTestData() {
    // Same construction as the training data, but a tenth of the samples.
    Random random = new Random(seed);
    int sampleCount = N_SAMPLES / 10;
    double[] targets = new double[sampleCount];
    double[] addendA = new double[sampleCount];
    double[] addendB = new double[sampleCount];
    for (int i = 0; i < sampleCount; i++) {
        addendA[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * random.nextDouble();
        addendB[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * random.nextDouble();
        targets[i] = addendA[i] + addendB[i];
    }
    INDArray columnA = Nd4j.create(addendA, new int[]{sampleCount, 1});
    INDArray columnB = Nd4j.create(addendB, new int[]{sampleCount, 1});
    INDArray inputs = Nd4j.hstack(columnA, columnB);
    INDArray outputs = Nd4j.create(targets, new int[]{sampleCount, 1});
    return new FederatedDataSetImpl(new DataSet(inputs, outputs));
}
 
Example 9
Project: SKIL_CE_1.0.0_Examples   File: NormalizeUciData.java   View source code 5 votes vote down vote up
/**
 * Renders every example in the iterator as one "label: v1, v2, ..." line.
 *
 * @param it     iterator over the examples (consumed one at a time)
 * @param labels one label per example, in iteration order
 * @param shape  the expected shape of each feature row
 * @return the rendered text, one example per line
 * @throws IllegalStateException on a label-count or shape mismatch
 */
private String toCsv(DataSetIterator it, List<Integer> labels, int[] shape) {
    if (it.numExamples() != labels.size()) {
        throw new IllegalStateException(
                String.format("numExamples == %d != labels.size() == %d",
                        it.numExamples(), labels.size()));
    }

    // StringBuilder: no need for StringBuffer's per-call synchronization here.
    StringBuilder sb = new StringBuilder();
    int l = 0;

    while (it.hasNext()) {
        INDArray features = it.next(1).getFeatures();

        if (!(Arrays.equals(features.shape(), shape))) {
            // BUGFIX: format string was missing the second %s, so the expected
            // shape was silently dropped from the exception message.
            throw new IllegalStateException(String.format("wrong shape: got %s, expected %s",
                    Arrays.toString(features.shape()), Arrays.toString(shape)));
        }

        // Prepend the label
        sb.append(labels.get(l)).append(": ");
        l++;

        for (int i=0; i<features.columns(); i++) {
            sb.append(features.getColumn(i));

            if (i < features.columns()-1) {
                sb.append(", ");
            }
        }

        sb.append("\n");
    }

    return sb.toString();
}
 
Example 10
Project: neo4j-ml-procedures   File: DL4JMLModel.java   View source code 5 votes vote down vote up
@Override
    protected Object doPredict(List<String> line) {
        try {
            // Wrap the single input row in a record reader and pull one DataSet.
            ListStringSplit split = new ListStringSplit(Collections.singletonList(line));
            ListStringRecordReader recordReader = new ListStringRecordReader();
            recordReader.initialize(split);
            DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, 1);

            DataSet dataSet = iterator.next();
            INDArray prediction = model.output(dataSet.getFeatures());

            // Interpret the network output according to the declared column type.
            DataType outputType = types.get(this.output);
            switch (outputType) {
                case _float:
                    return prediction.getDouble(0);
                case _class: {
                    // Arg-max over the class probabilities.
                    int numClasses = 2;
                    double best = 0;
                    int bestIndex = -1;
                    for (int i = 0; i < numClasses; i++) {
                        double p = prediction.getDouble(i);
                        if (p > best) {
                            bestIndex = i;
                            best = p;
                        }
                    }
                    return bestIndex;
//                    return prediction.getInt(0,1); // numberOfClasses
                }
                default:
                    throw new IllegalArgumentException("Output type not yet supported " + outputType);
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
 
Example 11
Project: par2hier   File: LabelSeeker.java   View source code 5 votes vote down vote up
/**
 * This method accepts vector, that represents any document,
 * and returns distances between this document, and previously trained categories
 * @param vector the document vector to score against every known label
 * @return one (label, cosine similarity) pair per trained label
 */
public List<Pair<String, Double>> getScores(@NonNull INDArray vector) {
    List<Pair<String, Double>> scores = new ArrayList<>();
    for (String label : labelsUsed) {
        // Every trained label must have a vector in the lookup table.
        INDArray labelVector = lookupTable.vector(label);
        if (labelVector == null) {
            throw new IllegalStateException("Label '" + label + "' has no known vector!");
        }
        double similarity = Transforms.cosineSim(vector, labelVector);
        scores.add(new Pair<String, Double>(label, similarity));
    }
    return scores;
}
 
Example 12
Project: ojAlgo-extensions   File: ArrayND.java   View source code 5 votes vote down vote up
public INDArray add(final Number n) {
    // Element-wise addition: copy the delegate array, then apply a unary
    // "x + n" modifier built from the factory's scalar cast of n.
    final ArrayAnyD<N> copied = this.copy();
    final N addend = myFactory.scalar().cast(n);
    copied.modifyAll(myFactory.function().add().second(addend));
    return new ArrayND<>(myFactory, copied);
}
 
Example 13
Project: subra   File: PlayerCommand.java   View source code 5 votes vote down vote up
/**
 * Builds a State from a velocity vector plus kick/dribbler settings by
 * delegating to the component-wise constructor.
 *
 * @param velocityVector The velocity vector to extract the velocity components from.
 *                       Rows 0, 1 and 2 of column 0 are read in that order —
 *                       assumed to be a column vector with at least 3 rows;
 *                       TODO(review) confirm the component ordering.
 * @param flatKick       The flat-kick strength as a percentage (between 0 and 1).
 * @param chipKick       The chip-kick strength as a percentage (between 0 and 1).
 * @param dribblerSpin   The dribbler spin as a percentage with the sign indicating the
 *                       direction of spin (between -1 and 1)
 */
public State(
    final INDArray velocityVector,
    final float flatKick,
    final float chipKick,
    final float dribblerSpin
) {
  this(
      velocityVector.getFloat(0, 0),
      velocityVector.getFloat(1, 0),
      velocityVector.getFloat(2, 0),
      flatKick,
      chipKick,
      dribblerSpin);
}
 
Example 14
Project: Word2VecfJava   File: WordVectorSerializer.java   View source code 5 votes vote down vote up
/**
 * Loads a serialized word2vec model from disk.
 *
 * @param wordFilePath path of the serialized model file
 * @param binary true for the binary format, false for the text format
 * @return {@link Word2Vec}, or null when reading the file failed
 */
public static Word2Vec loadWord2VecModel (String wordFilePath, boolean binary) {
	Word2Vec model = null;
	try {
		// Pick the parser matching the on-disk format.
		Pair<List<String>, INDArray> vocabAndVectors = binary
				? fromBinary(wordFilePath)
				: fromText(wordFilePath);
		INDArray vectors = vocabAndVectors.getValue();
		model = new Word2Vec(vectors.columns(), vocabAndVectors.getKey(), vectors, true);
	} catch (IOException e) {
		// Best effort: report and fall through, returning null on failure.
		e.printStackTrace();
	}
	return model;
}
 
Example 15
Project: algieba   File: UniformFlowPotentialField.java   View source code 5 votes vote down vote up
/**
 * See <a href="http://www.wolframalpha.com/input/?i=-1*(x*Cos%5BA%5D+%2B+y*Sin%5BA%5D)">this equation.</a>
 *
 * @param positionVector The position vector at which to compute the potential.
 */
@Override
public double getPotential(final INDArray positionVector) {
  // Dot-product style: scale each component by the flow multiplier, then sum.
  final INDArray scaled = positionVector.mul(this.multiplier);
  return scaled.sumNumber().doubleValue();
}
 
Example 16
Project: NeuralNetworksLite   File: TriangleWaveMathFunction.java   View source code 5 votes vote down vote up
@Override
public INDArray getFunctionValues(final INDArray x) {
    // Triangle wave with period 6: |2 * (phase - round(phase))| per point,
    // where phase = x / period; values lie in [0, 1].
    final double period = 6.0;
    final double[] points = x.data().asDouble();
    final double[] values = new double[points.length];
    for (int i = 0; i < points.length; i++) {
        final double phase = points[i] / period;
        values[i] = Math.abs(2 * (phase - Math.floor(phase + 0.5)));
    }
    // Return the result as a column vector.
    return Nd4j.create(values, new int[]{points.length, 1});
}
 
Example 17
Project: algieba   File: GaussianPotentialField.java   View source code 5 votes vote down vote up
/**
 * See <a href="http://www.wolframalpha.com/input/?i=integral+of+H+*+e%5E(-1*((A*t%2BB)%5E2%2B(C*t%2BD)%5E2))dt">
 * this equation</a>, where <code>x = A*t + B</code> and <code>y = C*t + D</code>.
 *
 * Both bounds are read as column vectors: row 0 supplies the x component,
 * row 1 the y component.
 *
 * @param lowerBound The lower bound of the integral.
 * @param upperBound The upper bound of the integral.
 * @return A {@link UnaryOperator} representing the result of the indefinite integral for
 *     which the value can be computed.
 */
@Override
public UnaryOperator<Double> computeLineIntegral(
    final INDArray lowerBound,
    final INDArray upperBound
) {
  return input -> {
    // compute parametric coefficients: x(t) = A*t + B, y(t) = C*t + D,
    // with t=0 at lowerBound and t=1 at upperBound
    final double A = upperBound.getDouble(0, 0) - lowerBound.getDouble(0, 0);
    final double B = lowerBound.getDouble(0, 0);
    final double C = upperBound.getDouble(1, 0) - lowerBound.getDouble(1, 0);
    final double D = lowerBound.getDouble(1, 0);

    // compute leading coefficients
    final double coeff = this.height * Math.sqrt(Math.PI)
        / (2 * Math.sqrt(Math.pow(A, 2) + Math.pow(C, 2)));

    // compute exponential expression
    final double eTerm = Math.exp(
        -1 * Math.pow(B * C - A * D, 2) / (Math.pow(A, 2) + Math.pow(C, 2)));

    // compute erf expression
    final double erf = GaussianDistribution.erf(
        Math.pow(A, 2) * input
            + Math.pow(C, 2) * input
            + A * B
            + C * D);

    // compute unscaled line integral
    // NOTE(review): degenerate bounds (A == C == 0) divide by zero above —
    // confirm callers never pass identical lower and upper bounds.
    return coeff * eTerm * erf;
  };
}
 
Example 18
Project: NeuralNetworksLite   File: RestrictedBoltzmannMachine.java   View source code 5 votes vote down vote up
/**
 * Draws one binomial(1, p) (i.e. Bernoulli) sample per entry of {@code x},
 * with success probability {@code x.getDouble(i, j)}.
 *
 * @param x   matrix of per-entry probabilities
 * @param rng random source passed through to the sampler
 * @return a matrix of the same shape holding the sampled values
 */
private INDArray binomial(INDArray x, Random rng) {
    // Nd4j.create(rows, cols) already yields a zeroed matrix; no need to
    // allocate and wrap an explicit double[] backing array as before.
    INDArray y = Nd4j.create(x.rows(), x.columns());
    for (int i = 0; i < x.rows(); i++) {
        for (int j = 0; j < x.columns(); j++) {
            y.put(i, j, RandomGenerator.binomial(1, x.getDouble(i, j), rng));
        }
    }
    return y;
}
 
Example 19
Project: algieba   File: GaussianPotentialField.java   View source code 5 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public INDArray toGlobalFrame(final INDArray positionVector) {
  // Scale by the field's extents, then rotate by the negated field angle.
  final INDArray extents = Vectors.columnVector(this.getLength(), this.getWidth());
  final INDArray scaled = positionVector.mul(extents);
  return Vectors.rotatePlanarCartesian(scaled, -1 * this.getAngle());
}
 
Example 20
Project: algieba   File: BoundaryPotentialField.java   View source code 5 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public double getPotential(final INDArray positionVector) {
  // Sum the scalar potential of the distance to each of the four walls.
  final double x = positionVector.getDouble(0, 0);
  final double y = positionVector.getDouble(1, 0);
  return this.getPotential(x)
      + this.getPotential(this.getWidth() - x)
      + this.getPotential(y)
      + this.getPotential(this.getLength() - y);
}
 
Example 21
Project: NeuralNetworksLite   File: DenseLayer.java   View source code 5 votes vote down vote up
/**
 * Builds a fully-connected layer.
 *
 * @param nIn              number of input units
 * @param nOut             number of output units
 * @param W                weight matrix, or null to initialize uniformly
 * @param b                bias vector, or null to initialize to zero
 * @param rng              random source, or null for a fixed-seed default
 * @param activationMethod activation function, or null to default to Sigmoid
 */
public DenseLayer(int nIn, int nOut, INDArray W, INDArray b, Random rng, Activation activationMethod) {
    this.nIn = nIn;
    this.nOut = nOut;
    this.rng = rng == null ? new Random(1234) : rng;
    // BUGFIX: initialize weights with the defaulted RNG (this.rng), not the
    // raw parameter — the original passed `rng`, which may still be null here.
    this.W = W == null ? WeightInit.apply(nIn, nOut, this.rng, WeightInit.UNIFORM) : W;
    this.b = b == null ? BiasInit.apply(nOut, null, BiasInit.ZERO) : b;
    activationMethod = activationMethod == null ? Activation.Sigmoid : activationMethod;
    this.activation = Activation.active(activationMethod);
    this.dactivation = Activation.dactive(activationMethod);
}
 
Example 22
Project: algieba   File: BoundaryPotentialField.java   View source code 5 votes vote down vote up
/**
 * {@inheritDoc}
 *
 * Implementation: the Euclidean length of (upperBound - lowerBound) times the
 * sum, over all four boundary walls, of each wall's line-integral segment
 * evaluated at t = 1 minus its value at t = 0.
 * NOTE(review): each segment operator is constructed twice (once per
 * endpoint); hoisting the four operators would avoid the duplicate builds.
 */
@Override
public double getLineIntegral(final INDArray lowerBound, final INDArray upperBound) {
  return Math.sqrt(Transforms.pow(upperBound.sub(lowerBound), 2).sumNumber().doubleValue())
      * (this.computeNorthernLineIntegralSegment(lowerBound, upperBound).apply(1d)
      + this.computeSouthernLineIntegralSegment(lowerBound, upperBound).apply(1d)
      + this.computeEasternLineIntegralSegment(lowerBound, upperBound).apply(1d)
      + this.computeWesternLineIntegralSegment(lowerBound, upperBound).apply(1d)
      - this.computeNorthernLineIntegralSegment(lowerBound, upperBound).apply(0d)
      - this.computeSouthernLineIntegralSegment(lowerBound, upperBound).apply(0d)
      - this.computeEasternLineIntegralSegment(lowerBound, upperBound).apply(0d)
      - this.computeWesternLineIntegralSegment(lowerBound, upperBound).apply(0d));
}
 
Example 23
Project: hack-a-drone   File: DeepLearning.java   View source code 5 votes vote down vote up
/**
 * Pre process the image to create a matrix
 *
 * NOTE(review): this is an unimplemented challenge stub — imageMatrix stays
 * null, so normalizeImage(null) is invoked and the method returns null.
 * Callers must tolerate a null result until Part 2 is implemented.
 */
private INDArray processImage(BufferedImage bufferedImage) {
    //TODO Deep Learning Challenge Part 2: Normalize the Image
    INDArray imageMatrix = null;


    normalizeImage(imageMatrix);
    return imageMatrix;
}
 
Example 24
Project: algieba   File: ExtendedKalmanFilter.java   View source code 5 votes vote down vote up
/**
 * Computes the filtered {@link Distribution} (mean and covariance) using the supplied state- and
 * measurement-transition functions, their respective Jacobians and the supplied control input,
 * process noise, observation noise, and current state.
 *
 * @param stateTransition               A {@link BiFunction} which takes as its first argument the
 *                                      previous state, and the control input as its second
 *                                      argument. The result is the next state.
 * @param measurementTransition         A {@link Function} which takes as its first argument the
 *                                      current state and returns the measurement.
 * @param stateTransitionJacobian       The Jacobian representing the state-transition.
 * @param measurementTransitionJacobian The Jacobian representing the measurement-transition.
 * @param controlInput                  The control input.
 * @param processCovariance             the process covariance
 * @param observationNoise              The {@link Distribution} of observation noise
 * @param state                         The {@link Distribution} of the latest state.
 * @return The filtered state.
 */
public Distribution apply(
    final BiFunction<INDArray, INDArray, INDArray> stateTransition,
    final Function<INDArray, INDArray> measurementTransition,
    final INDArray stateTransitionJacobian,
    final INDArray measurementTransitionJacobian,
    final INDArray controlInput,
    final INDArray processCovariance,
    final Distribution observationNoise,
    final Distribution state
) {
  // Predicted state: x⁻ = f(x, u).
  final INDArray projectedState = stateTransition.apply(state.getMean(), controlInput);

  // Predicted error covariance: P⁻ = F·P·Fᵀ + Q.
  // BUGFIX: the original propagated state.getMean() here instead of the state
  // covariance, which is not the EKF covariance-prediction equation.
  final INDArray projectedErrorCovariance = stateTransitionJacobian
      .mmul(state.getCovariance()
          .mmul(stateTransitionJacobian.transpose()))
      .add(processCovariance);

  // Kalman gain: K = P⁻·Hᵀ·(H·P⁻·Hᵀ + R)⁻¹.
  final INDArray kalmanGain = projectedErrorCovariance
      .mmul(measurementTransitionJacobian.transpose()
          .mmul(InvertMatrix.invert(measurementTransitionJacobian
              .mmul(projectedErrorCovariance
                  .mmul(measurementTransitionJacobian.transpose()))
              .add(observationNoise.getCovariance()), false)));

  // State update: x̂ = x⁻ + K·(h(x) − h(x⁻)).
  final INDArray estimatedState = projectedState
      .add(kalmanGain
          .mmul(measurementTransition.apply(state.getMean())
              .sub(measurementTransition.apply(projectedState))));

  // Covariance update: P = (I − K·H)·P⁻.
  // BUGFIX: the original combined K and H with element-wise mul(); the EKF
  // update requires the matrix product K·H here.
  final INDArray estimatedErrorCovariance = Nd4j.eye(estimatedState.rows())
      .sub(kalmanGain.mmul(measurementTransitionJacobian))
      .mmul(projectedErrorCovariance);

  return new SimpleDistribution(estimatedState, estimatedErrorCovariance);
}
 
Example 25
Project: NeuralNetworksLite   File: SawtoothMathFunction.java   View source code 5 votes vote down vote up
@Override
public INDArray getFunctionValues(final INDArray x) {
    // Sawtooth wave with period 4: 2 * (phase - round(phase)) per point,
    // where phase = x / period. The input holds the sample positions.
    final double sawtoothPeriod = 4.0;
    final double[] points = x.data().asDouble();
    final double[] values = new double[points.length];
    for (int i = 0; i < points.length; i++) {
        final double phase = points[i] / sawtoothPeriod;
        values[i] = 2 * (phase - Math.floor(phase + 0.5));
    }
    // Return the samples as a column vector.
    return Nd4j.create(values, new int[]{points.length, 1});
}
 
Example 26
Project: StockPrediction   File: StockDataSetIterator.java   View source code 5 votes vote down vote up
/**
 * Builds the next mini-batch of time-series examples; features and labels are
 * min-max normalized per channel (open, close, low, high, volume).
 *
 * BUGFIX: the "open" label row was normalized with the close-price bounds
 * (minArray[1]/maxArray[1]); it now uses the open-price bounds
 * (minArray[0]/maxArray[0]), matching the feature normalization.
 */
@Override
public DataSet next(int num) {
    if (exampleStartOffsets.size() == 0) throw new NoSuchElementException();
    int actualMiniBatchSize = Math.min(num, exampleStartOffsets.size());
    // Shapes are [batch, channels, time]; 'f' ordering for recurrent input.
    INDArray input = Nd4j.create(new int[] {actualMiniBatchSize, VECTOR_SIZE, exampleLength}, 'f');
    INDArray label;
    if (category.equals(PriceCategory.ALL)) label = Nd4j.create(new int[] {actualMiniBatchSize, VECTOR_SIZE, exampleLength}, 'f');
    else label = Nd4j.create(new int[] {actualMiniBatchSize, predictLength, exampleLength}, 'f');
    for (int index = 0; index < actualMiniBatchSize; index++) {
        int startIdx = exampleStartOffsets.removeFirst();
        int endIdx = startIdx + exampleLength;
        StockData curData = train.get(startIdx);
        StockData nextData;
        for (int i = startIdx; i < endIdx; i++) {
            int c = i - startIdx;
            input.putScalar(new int[] {index, 0, c}, (curData.getOpen() - minArray[0]) / (maxArray[0] - minArray[0]));
            input.putScalar(new int[] {index, 1, c}, (curData.getClose() - minArray[1]) / (maxArray[1] - minArray[1]));
            input.putScalar(new int[] {index, 2, c}, (curData.getLow() - minArray[2]) / (maxArray[2] - minArray[2]));
            input.putScalar(new int[] {index, 3, c}, (curData.getHigh() - minArray[3]) / (maxArray[3] - minArray[3]));
            input.putScalar(new int[] {index, 4, c}, (curData.getVolume() - minArray[4]) / (maxArray[4] - minArray[4]));
            // Labels are the next time step's values, normalized identically.
            nextData = train.get(i + 1);
            if (category.equals(PriceCategory.ALL)) {
                label.putScalar(new int[] {index, 0, c}, (nextData.getOpen() - minArray[0]) / (maxArray[0] - minArray[0]));
                label.putScalar(new int[] {index, 1, c}, (nextData.getClose() - minArray[1]) / (maxArray[1] - minArray[1]));
                label.putScalar(new int[] {index, 2, c}, (nextData.getLow() - minArray[2]) / (maxArray[2] - minArray[2]));
                label.putScalar(new int[] {index, 3, c}, (nextData.getHigh() - minArray[3]) / (maxArray[3] - minArray[3]));
                label.putScalar(new int[] {index, 4, c}, (nextData.getVolume() - minArray[4]) / (maxArray[4] - minArray[4]));
            } else {
                label.putScalar(new int[]{index, 0, c}, feedLabel(nextData));
            }
            curData = nextData;
        }
        if (exampleStartOffsets.size() == 0) break;
    }
    return new DataSet(input, label);
}
 
Example 27
Project: omr-dataset-tools   File: SubImages.java   View source code 5 votes vote down vote up
/**
 * Report the shape name indicated in the labels vector.
 *
 * @param labels the labels vector (1.0 for a shape, 0.0 for the others)
 * @return the shape name, or null when no component is non-zero
 */
private OmrShape getShape (INDArray labels)
{
    // The index of the first non-zero component selects the shape.
    for (int c = 0; c < numClasses; c++) {
        if (labels.getDouble(c) != 0) {
            return OmrShape.values()[c];
        }
    }

    return null;
}
 
Example 28
Project: omr-dataset-tools   File: Training.java   View source code 5 votes vote down vote up
/**
 * Save to disk the image for a shape not correctly recognized.
 *
 * @param prediction the (wrong) prediction
 * @param rawData    pixels raw data
 * @param folder     target folder for current epoch; when null, the mistake is
 *                   only logged and no image file is written
 * @throws Exception if the record metadata cannot be read or the image file
 *                   cannot be written
 */
private void saveMistake (Prediction prediction,
                          List<Writable> rawData,
                          Path folder)
        throws Exception
{
    // Recover which journal line (and hence which actual shape) this was.
    RecordMetaDataLine meta = prediction.getRecordMetaData(RecordMetaDataLine.class);
    final int line = meta.getLineNumber();
    final OmrShape predicted = shapeValues[prediction.getPredictedClass()];
    final OmrShape actual = shapeValues[prediction.getActualClass()];
    final Journal.Record record = journal.getRecord(line);
    System.out.println(record + " mistaken for " + predicted);

    if (folder != null) {
        Files.createDirectories(folder);

        // Generate mistaken subimage
        double[] pixels = new double[rawData.size()];

        for (int i = 0; i < pixels.length; i++) {
            pixels[i] = rawData.get(i).toDouble();
        }

        INDArray row = Nd4j.create(pixels);
        BufferedImage img = SubImages.buildSubImage(row);

        // Save subimage to disk, with proper naming
        String name = actual + "-" + line + "-" + predicted + OUTPUT_IMAGES_EXT;
        ImageIO.write(img, OUTPUT_IMAGES_FORMAT, folder.resolve(name).toFile());
    }
}
 
Example 29
Project: FederatedAndroidTrainer   File: IrisModel.java   View source code 5 votes vote down vote up
@Override
public String evaluate(FederatedDataSet federatedDataSet) {
    // Evaluate the model on the supplied test set.
    DataSet testData = (DataSet) federatedDataSet.getNativeDataSet();
    double score = model.score(testData);
    // Run a full evaluation pass; note that only the score reaches the
    // returned string — the Evaluation statistics are computed but unreported.
    Evaluation evaluation = new Evaluation(numClasses);
    INDArray predictions = model.output(testData.getFeatureMatrix());
    evaluation.eval(testData.getLabels(), predictions);
    return "Score: " + score;
}
 
Example 30
Project: ShotgunWSD   File: WordEmbeddingRelatedness.java   View source code 5 votes vote down vote up
public double computeSimilarity(Synset synset1, String word1, Synset synset2, String word2){
    // Build one sense embedding per (synset, word) pair...
    INDArray first = Nd4j.create(SenseEmbedding.getSenseEmbedding(wordVectors, synset1, word1, senseComputation));
    INDArray second = Nd4j.create(SenseEmbedding.getSenseEmbedding(wordVectors, synset2, word2, senseComputation));

    // ...then delegate to the array-based overload, comparing slots 0 and 1.
    return computeSimilarity(new INDArray[]{first, second}, null, null, 0, 1);
}
 
Example 31
Project: TrumpHallucinator   File: GravelLSTMforTrump.java   View source code 4 votes vote down vote up
/** Generate a sample from the network, given an (optional, possibly null) initialization. Initialization
 * can be used to 'prime' the RNN with a sequence you want to extend/continue.<br>
 * Note that the initialization is used for all samples
 * @param initialization String, may be null. If null, select a random character as initialization for all samples
 * @param net MultiLayerNetwork with one or more GravesLSTM/RNN layers and a softmax output layer
 * @param iter CharacterIterator. Used for going from indexes back to characters
 * @param rng random source used when sampling from the output distributions
 * @param charactersToSample Number of characters to sample from network (excluding initialization)
 * @param numSamples number of independent samples to generate in parallel
 * @return one generated string per sample, each starting with the initialization
 */
private static String[] sampleCharactersFromNetwork(String initialization, MultiLayerNetwork net,
                                                    CharacterIterator iter, Random rng, int charactersToSample, int numSamples ){
    //Set up initialization. If no initialization: use a random character
    if( initialization == null ){
        initialization = String.valueOf(iter.getRandomCharacter());
    }

    //Create input for initialization: one-hot [sample, charIndex, time] tensor
    INDArray initializationInput = Nd4j.zeros(numSamples, iter.inputColumns(), initialization.length());
    char[] init = initialization.toCharArray();
    for( int i=0; i<init.length; i++ ){
        int idx = iter.convertCharacterToIndex(init[i]);
        for( int j=0; j<numSamples; j++ ){
            initializationInput.putScalar(new int[]{j,idx,i}, 1.0f);
        }
    }

    StringBuilder[] sb = new StringBuilder[numSamples];
    for( int i=0; i<numSamples; i++ ) sb[i] = new StringBuilder(initialization);

    //Sample from network (and feed samples back into input) one character at a time (for all samples)
    //Sampling is done in parallel here
    net.rnnClearPreviousState();
    INDArray output = net.rnnTimeStep(initializationInput);
    output = output.tensorAlongDimension(output.size(2)-1,1,0);	//Gets the last time step output

    for( int i=0; i<charactersToSample; i++ ){
        //Set up next input (single time step) by sampling from previous output
        INDArray nextInput = Nd4j.zeros(numSamples,iter.inputColumns());
        //Output is a probability distribution. Sample from this for each example we want to generate, and add it to the new input
        for( int s=0; s<numSamples; s++ ){
            double[] outputProbDistribution = new double[iter.totalOutcomes()];
            for( int j=0; j<outputProbDistribution.length; j++ ) outputProbDistribution[j] = output.getDouble(s,j);
            int sampledCharacterIdx = sampleFromDistribution(outputProbDistribution,rng);

            nextInput.putScalar(new int[]{s,sampledCharacterIdx}, 1.0f);		//Prepare next time step input
            sb[s].append(iter.convertIndexToCharacter(sampledCharacterIdx));	//Add sampled character to StringBuilder (human readable output)
        }

        output = net.rnnTimeStep(nextInput);	//Do one time step of forward pass
    }

    String[] out = new String[numSamples];
    for( int i=0; i<numSamples; i++ ) out[i] = sb[i].toString();
    return out;
}
 
Example 32
Project: ojAlgo-extensions   File: ArrayND.java   View source code 4 votes vote down vote up
public INDArray gti(final Number other) {
    // TODO Auto-generated method stub
    // NOTE(review): unimplemented — presumably element-wise greater-than,
    // in place, per ND4J's "gti" naming (confirm). Returning null will NPE
    // in any caller expecting an INDArray.
    return null;
}
 
Example 33
Project: ojAlgo-extensions   File: ComplexArrayND.java   View source code 4 votes vote down vote up
@Override
public IComplexNDArray muli(final INDArray other) {
    // TODO Auto-generated method stub
    // NOTE(review): unimplemented — presumably in-place element-wise multiply
    // per ND4J's "muli" naming (confirm). Returns null instead of a result.
    return null;
}
 
Example 34
Project: ojAlgo-extensions   File: ComplexArrayND.java   View source code 4 votes vote down vote up
@Override
public INDArray put(final int i, final int j, final Number element) {
    // TODO Auto-generated method stub
    // NOTE(review): unimplemented — the element is silently dropped and null
    // is returned; callers chaining on the result will NPE.
    return null;
}
 
Example 35
Project: ojAlgo-extensions   File: ComplexArrayND.java   View source code 4 votes vote down vote up
@Override
public IComplexNDArray rsubi(final INDArray other) {
    // TODO Auto-generated method stub
    // NOTE(review): unimplemented — presumably in-place reverse subtraction
    // (other - this) per ND4J's "rsubi" naming (confirm). Returns null.
    return null;
}
 
Example 36
Project: ojAlgo-extensions   File: ArrayND.java   View source code 4 votes vote down vote up
public INDArray mul(final Number n, final INDArray result) {
    // TODO Auto-generated method stub
    // NOTE(review): unimplemented — the result buffer is never written and
    // null is returned instead of the expected product array.
    return null;
}
 
Example 37
Project: ShotgunWSD   File: WordEmbeddingRelatedness.java   View source code 4 votes vote down vote up
public double computeSimilarity(Object[] synsetRepresentation, String[] windowWords, int[] synset2WordIndex, int k, int j){
    // The representation array holds precomputed sense embeddings; compare the
    // two requested slots by cosine similarity. The remaining parameters are
    // unused by this overload.
    INDArray[] embeddings = (INDArray[]) synsetRepresentation;
    return Transforms.cosineSim(embeddings[k], embeddings[j]);
}
 
Example 38
Project: ojAlgo-extensions   File: ComplexArrayND.java   View source code 4 votes vote down vote up
@Override
public IComplexNDArray rsubi(final INDArray other, final INDArray result) {
    // TODO Auto-generated method stub
    // NOTE(review): unimplemented — the result buffer is never written and
    // null is returned; callers will NPE on the missing result.
    return null;
}
 
Example 39
Project: ojAlgo-extensions   File: ArrayND.java   View source code 4 votes vote down vote up
public INDArray repeat(final int... repeats) {
    // TODO Auto-generated method stub
    // NOTE(review): unimplemented — returns null instead of a repeated copy
    // of the array; callers chaining on the result will NPE.
    return null;
}
 
Example 40
Project: ojAlgo-extensions   File: ArrayND.java   View source code 4 votes vote down vote up
public INDArray broadcast(final int... shape) {
    // TODO Auto-generated method stub
    // NOTE(review): unimplemented — returns null instead of an array
    // broadcast to the requested shape; callers will NPE.
    return null;
}