Java Code Examples for org.opencv.core.Core#norm()

The following examples show how to use org.opencv.core.Core#norm(). They are extracted from open source projects; the source file, project, and license are noted above each example.
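Before the project examples, here is a minimal, self-contained sketch of the two Core.norm overloads that appear below: the single-Mat form returns the norm of one matrix, and the two-Mat form returns the norm of the element-wise difference. The class name and the vector values are hypothetical and only illustrate the calls.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class NormSketch {
    public static void main(String[] args) {
        // The OpenCV native library must be loaded before creating any Mat.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Two hypothetical 3x1 float vectors filled with constant values.
        Mat a = new Mat(3, 1, CvType.CV_32F, new Scalar(1.0));
        Mat b = new Mat(3, 1, CvType.CV_32F, new Scalar(2.0));

        // Single-Mat overload: L2 (Euclidean) length of a -> sqrt(3 * 1^2) = ~1.732
        double lengthA = Core.norm(a, Core.NORM_L2);

        // Two-Mat overload: L2 norm of the difference (a - b) -> sqrt(3 * 1^2) = ~1.732
        double distAB = Core.norm(a, b, Core.NORM_L2);

        System.out.println("||a|| = " + lengthA + ", ||a - b|| = " + distAB);
    }
}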
Example 1
Source File: RecognitionThread.java    From ml-authentication with Apache License 2.0
/**
 * Returns the students whose mean feature vector has a cosine similarity above the
 * threshold with the feature vector to recognize.
 * @param featureVectorToRecognize the feature vector of the face to be recognized
 * @return the students whose absolute cosine similarity exceeds SIMILARITY_THRESHOLD
 */
private synchronized List<Student> getMostSimilarStudentIfInThreshold(Mat featureVectorToRecognize){
    List<StudentImageCollectionEvent> studentImageCollectionEvents = studentImageCollectionEventDao.queryBuilder().where(StudentImageCollectionEventDao.Properties.MeanFeatureVector.isNotNull()).list();
    List<Student> studentsInThreshold = new ArrayList<>();
    for (StudentImageCollectionEvent studentImageCollectionEvent : studentImageCollectionEvents){
        Student currentStudent = studentImageCollectionEvent.getStudent();
        // Skip if the students are identical (same UniqueId)
        if (!areStudentsIdentical(currentStudent)){
            List<Float> featureVectorList = gson.fromJson(studentImageCollectionEvent.getMeanFeatureVector(), new TypeToken<List<Float>>(){}.getType());
            Mat featureVector = Converters.vector_float_to_Mat(featureVectorList);
            double dotProduct = featureVector.dot(featureVectorToRecognize);
            double normFeatureVector = Core.norm(featureVector, Core.NORM_L2);
            double normFeatureVectorToRecognize = Core.norm(featureVectorToRecognize, Core.NORM_L2);
            double cosineSimilarity = dotProduct / (normFeatureVector * normFeatureVectorToRecognize);
            double absoluteCosineSimilarity = Math.abs(cosineSimilarity);
            Log.i(getClass().getName(), "getMostSimilarStudentIfInThreshold: absoluteCosineSimilarity: " + absoluteCosineSimilarity + " with Student: " + currentStudent.getUniqueId());
            if (absoluteCosineSimilarity > SIMILARITY_THRESHOLD){
                studentsInThreshold.add(currentStudent);
            }
        } else {
            Log.i(getClass().getName(), "getMostSimilarStudentIfInThreshold: currentStudent: " + currentStudent.getUniqueId() + " was skipped because it is identical with the student: " + student.getUniqueId());
        }
    }
    return studentsInThreshold;
}
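In this example Core.norm(mat, Core.NORM_L2) supplies the Euclidean length of each feature vector; dividing the dot product of the two vectors by the product of those lengths yields the cosine similarity that is compared against SIMILARITY_THRESHOLD.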
 
Example 2
Source File: Eigenfaces.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0
public String recognize(Mat img, String expectedLabel){
    // Flatten the image into a single row vector
    img = img.reshape(1,1);
    // Subtract mean
    img.convertTo(img, CvType.CV_32F);
    Core.subtract(img, Psi, img);
    // Project to subspace
    Mat projected = getFeatureVector(img);
    // Save all points of image for tSNE
    img.convertTo(img, CvType.CV_8U);
    addImage(projected, expectedLabel, true);
    //addImage(projected, expectedLabel);
    Mat distance = new Mat(Omega.rows(), 1, CvType.CV_64FC1);
    for (int i=0; i<Omega.rows(); i++){
        double dist = Core.norm(projected.row(0), Omega.row(i), Core.NORM_L2);
        distance.put(i, 0, dist);
    }
    Mat sortedDist = new Mat(Omega.rows(), 1, CvType.CV_8UC1);
    Core.sortIdx(distance, sortedDist, Core.SORT_EVERY_COLUMN + Core.SORT_ASCENDING);
    // Give back the name of the found person
    int index = (int)(sortedDist.get(0,0)[0]);
    return labelMap.getKey(labelList.get(index));
}
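Here Core.norm(projected.row(0), Omega.row(i), Core.NORM_L2) measures the Euclidean distance between the projected probe image and each stored projection in Omega; Core.sortIdx then orders the distances so the label of the closest match can be returned.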
 
Example 3
Source File: CameraCalibrator.java    From OpenCV-AndroidSamples with MIT License
private double computeReprojectionErrors(List<Mat> objectPoints,
                                         List<Mat> rvecs, List<Mat> tvecs, Mat perViewErrors) {
    MatOfPoint2f cornersProjected = new MatOfPoint2f();
    double totalError = 0;
    double error;
    float[] viewErrors = new float[objectPoints.size()];

    MatOfDouble distortionCoefficients = new MatOfDouble(mDistortionCoefficients);
    int totalPoints = 0;
    for (int i = 0; i < objectPoints.size(); i++) {
        MatOfPoint3f points = new MatOfPoint3f(objectPoints.get(i));
        Calib3d.projectPoints(points, rvecs.get(i), tvecs.get(i),
                mCameraMatrix, distortionCoefficients, cornersProjected);
        error = Core.norm(mCornersBuffer.get(i), cornersProjected, Core.NORM_L2);

        int n = objectPoints.get(i).rows();
        viewErrors[i] = (float) Math.sqrt(error * error / n);
        totalError  += error * error;
        totalPoints += n;
    }
    perViewErrors.create(objectPoints.size(), 1, CvType.CV_32FC1);
    perViewErrors.put(0, 0, viewErrors);

    return Math.sqrt(totalError / totalPoints);
}
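Core.norm with Core.NORM_L2 accumulates the reprojection error between the detected corners and the corners projected by Calib3d.projectPoints; the method fills perViewErrors with the per-view RMS errors and returns the overall RMS error.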
 
Example 4
Source File: Recognition.java    From classchecks with Apache License 2.0
/**
 * Computes the similarity between the pixels of two images.
 * <p>Uses a relative-error measure based on the L2 norm: the corresponding pixel values
 * of the two images are subtracted, the squares of the differences are summed, and the
 * square root of the result is taken.</p>
 * @param A the first image
 * @param B the second image
 * @return the per-pixel L2 error, or INVALID_IDENTIFICATION if the image sizes do not match
 */
public static double getSimilarity(Mat A, Mat B) {
	if(A.rows() > 0 && A.rows() == B.rows() && A.cols() > 0 && A.cols() == B.cols()) {
		double errorL2 = Core.norm(A, B, Core.NORM_L2);
		double similarity = errorL2 / (double)(A.rows() * A.cols());
		return similarity;
	}
	return INVALID_IDENTIFICATION;
}
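The two-Mat overload of Core.norm computes the L2 norm of the pixel-wise difference between A and B, which is then divided by the number of pixels; smaller values indicate more similar images.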
 
Example 5
Source File: FaceDetection.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0
public Eyes getEyes(Mat img){
    double halfWidth = img.cols() / 2;
    double height = img.rows();
    double[] values = new double[4];
    values[0] = 0;
    values[1] = 0;
    values[2] = halfWidth;
    values[3] = height;
    Rect rightHalf = new Rect(values);
    values[0] = halfWidth;
    Rect leftHalf = new Rect(values);
    MatOfRect rightEyes = new MatOfRect();
    MatOfRect leftEyes = new MatOfRect();

    Mat rightHalfImg = img.submat(rightHalf);
    rightEyeDetector.detectMultiScale(rightHalfImg, rightEyes);
    Mat leftHalfImg = img.submat(leftHalf);
    leftEyeDetector.detectMultiScale(leftHalfImg, leftEyes);

    if (rightEyes.empty() || leftEyes.empty() || rightEyes.toArray().length > 1 || leftEyes.toArray().length > 1){
        return null;
    }

    Rect rightEye = rightEyes.toArray()[0];
    Rect leftEye = leftEyes.toArray()[0];

    MatOfFloat rightPoint = new MatOfFloat(rightEye.x + rightEye.width / 2, rightEye.y + rightEye.height / 2);
    MatOfFloat leftPoint = new MatOfFloat(img.cols() / 2 + leftEye.x + leftEye.width / 2, leftEye.y + leftEye.height / 2);

    MatOfFloat diff = new MatOfFloat();
    Core.subtract(leftPoint, rightPoint, diff);
    double angle = Core.fastAtan2(diff.toArray()[1], diff.toArray()[0]);
    double dist = Core.norm(leftPoint, rightPoint, Core.NORM_L2);
    Eyes eyes = new Eyes(dist, rightPoint, leftPoint, angle);
    return eyes;
}
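Core.norm on the two eye-center points gives the inter-eye distance, which, together with the angle from Core.fastAtan2, is stored in the returned Eyes object.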