ch.zhaw.facerecognitionlibrary.Helpers.FileHelper Java Examples

The following examples show how to use ch.zhaw.facerecognitionlibrary.Helpers.FileHelper. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: KNearestNeighbor.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a k-nearest-neighbor classifier (k = 20) backed by XML
 * training/test files. In RECOGNITION mode the previously persisted
 * training data is loaded immediately.
 */
public KNearestNeighbor(Context context, int method)  {
    this.context = context;
    this.method = method;
    fh = new FileHelper();
    k = 20;
    trainingList = new Mat();
    testList = new Mat();
    this.labelList = new ArrayList<>();
    this.labelListTest = new ArrayList<>();
    this.labelMap = new OneToOneMap<>();
    this.labelMapTest = new OneToOneMap<>();
    trainingFile = "knn_traininglist.xml";
    testFile = "knn_testlist.xml";
    if (method == RECOGNITION) {
        loadFromFile();
    }
}
 
Example #2
Source File: Eigenfaces.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 6 votes vote down vote up
/**
 * Persists the trained Eigenfaces model (Omega, Psi, eigenvectors and Phi)
 * together with the training label list and label map to the Eigenfaces
 * data folder.
 */
public void saveToFile(){
    FileHelper helper = new FileHelper();
    helper.saveIntegerList(labelList, helper.createLabelFile(helper.EIGENFACES_PATH, "train"));
    helper.saveLabelMapToFile(helper.EIGENFACES_PATH, labelMap, "train");
    List<MatName> mats = new ArrayList<>();
    mats.add(new MatName("Omega", Omega));
    mats.add(new MatName("Psi", Psi));
    mats.add(new MatName("eigVectors", eigVectors));
    // Phi is stored as well so it can be used for tSNE later.
    mats.add(new MatName("Phi", Phi));
    helper.saveMatListToXml(mats, helper.EIGENFACES_PATH, filename);
}
 
Example #3
Source File: Eigenfaces.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 6 votes vote down vote up
/**
 * Restores the Eigenfaces model (Omega, Psi, eigenvectors) plus the training
 * label list and label map from the XML files in the Eigenfaces data folder.
 */
public void loadFromFile(){
    FileHelper helper = new FileHelper();
    List<MatName> requested = new ArrayList<>();
    requested.add(new MatName("Omega", Omega));
    requested.add(new MatName("Psi", Psi));
    requested.add(new MatName("eigVectors", eigVectors));
    // The helper fills in the matrices it finds under the requested names.
    for (MatName mat : helper.getMatListFromXml(requested, helper.EIGENFACES_PATH, filename)) {
        String name = mat.getName();
        if ("Omega".equals(name)) {
            Omega = mat.getMat();
        } else if ("Psi".equals(name)) {
            Psi = mat.getMat();
        } else if ("eigVectors".equals(name)) {
            eigVectors = mat.getMat();
        }
    }
    labelList = helper.loadIntegerList(helper.createLabelFile(helper.EIGENFACES_PATH, "train"));
    labelMap = helper.getLabelMapFromFile(helper.EIGENFACES_PATH);
}
 
Example #4
Source File: Caffe.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 6 votes vote down vote up
/**
 * Sets up a Caffe-based feature extractor. Loads the model and weights files
 * configured in the preferences, applies the configured mean values, and
 * selects the classifier (SVM or k-NN) according to the preferences.
 *
 * @param context Android context used to read the preferences
 * @param method  recognition mode, passed through to the chosen classifier
 */
public Caffe(Context context, int method) {
    fh = new FileHelper();
    String dataPath = fh.CAFFE_PATH;
    PreferencesHelper preferencesHelper = new PreferencesHelper(context);
    String modelFile = preferencesHelper.getCaffeModelFile();
    String weightsFile = preferencesHelper.getCaffeWeightsFile();
    layer = preferencesHelper.getCaffeOutputLayer();
    float[] meanValues = preferencesHelper.getCaffeMeanValues();

    // Use the primitive type: a boxed Boolean adds no value for a local flag
    // and would risk a NullPointerException on unboxing in the condition.
    boolean classificationMethod = preferencesHelper.getClassificationMethodTFCaffe();

    caffe = new CaffeMobile();
    caffe.setNumThreads(4);
    caffe.loadModel(dataPath + modelFile, dataPath + weightsFile);
    caffe.setMean(meanValues);
    if (classificationMethod) {
        rec = new SupportVectorMachine(context, method);
    } else {
        rec = new KNearestNeighbor(context, method);
    }
}
 
Example #5
Source File: StudentImageCollectionActivity.java    From ml-authentication with Apache License 2.0 5 votes vote down vote up
/**
 * Stores all the buffered StudentImages to the file system and database.
 * Runs on a background thread; one StudentImageCollectionEvent row is
 * created, then one image file + StudentImage row per buffered Mat.
 */
private synchronized void storeStudentImages(){
    new Thread(new Runnable() {
        @Override
        public void run() {
            StudentImageCollectionEvent studentImageCollectionEvent = new StudentImageCollectionEvent();
            studentImageCollectionEvent.setTime(Calendar.getInstance());
            studentImageCollectionEvent.setDevice(device);
            Long studentImageCollectionEventId = studentImageCollectionEventDao.insert(studentImageCollectionEvent);
            // The target folder depends only on the device and the event id,
            // so create the helper and the directory once, not per image.
            FileHelper fileHelper = new FileHelper();
            String wholeFolderPath = StudentHelper.getStudentImageDirectory() + "/" + device.getDeviceId() + "/" + Long.toString(studentImageCollectionEventId);
            if (!new File(wholeFolderPath).mkdirs()) {
                // mkdirs() returning false is unexpected here (the event id is new);
                // log it instead of silently ignoring the result.
                Log.w(getClass().getName(), "Directory could not be created (or already existed): " + wholeFolderPath);
            }
            for(int i=0; i<studentImages.size(); i++){
                MatName matName = new MatName(Integer.toString(i), studentImages.get(i));
                fileHelper.saveMatToImage(matName, wholeFolderPath + "/");

                String imageUrl = wholeFolderPath + "/" + Integer.toString(i) + ".png";
                StudentImage studentImage = new StudentImage();
                studentImage.setTimeCollected(Calendar.getInstance());
                studentImage.setImageFileUrl(imageUrl);
                studentImage.setStudentImageCollectionEvent(studentImageCollectionEvent);
                studentImageDao.insert(studentImage);
            }
            Log.i(getClass().getName(), "storeStudentImages has finished successfully.");

            // Initiate background job for face recognition training
            BootReceiver.scheduleFaceRecognitionTranining(getApplicationContext());

        }
    }).start();
}
 
Example #6
Source File: RecognitionActivity.java    From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 5 votes vote down vote up
/**
 * Sets up the recognition screen: prepares the photo directory, configures
 * the camera preview (camera index, night portrait, exposure compensation,
 * max frame size) from the shared preferences, and registers this activity
 * as the camera-frame listener.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG,"called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.recognition_layout);
    progressBar = (ProgressBar)findViewById(R.id.progressBar);
    fh = new FileHelper();
    File folder = new File(fh.getFolderPath());
    // NOTE(review): these log branches conflate the cases — mkdir()==false with
    // isDirectory()==true (directory already existed) still logs "created".
    if(folder.mkdir() || folder.isDirectory()){
        Log.i(TAG,"New directory for photos created");
    } else {
        Log.i(TAG,"Photos directory already existing");
    }
    mRecognitionView = (CustomCameraView) findViewById(R.id.RecognitionView);
    // Use camera which is selected in settings
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
    front_camera = sharedPref.getBoolean("key_front_camera", true);
    night_portrait = sharedPref.getBoolean("key_night_portrait", false);
    // parseInt avoids the needless boxing of Integer.valueOf and matches the
    // style used for the camera-view dimensions below.
    exposure_compensation = Integer.parseInt(sharedPref.getString("key_exposure_compensation", "20"));

    if (front_camera){
        mRecognitionView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
    } else {
        mRecognitionView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_BACK);
    }
    mRecognitionView.setVisibility(SurfaceView.VISIBLE);
    mRecognitionView.setCvCameraViewListener(this);

    int maxCameraViewWidth = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_width", "640"));
    int maxCameraViewHeight = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_height", "480"));
    mRecognitionView.setMaxFrameSize(maxCameraViewWidth, maxCameraViewHeight);
}
 
Example #7
Source File: SupportVectorMachine.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 5 votes vote down vote up
/**
 * Builds an SVM classifier using the application preferences: sets up the
 * training/prediction/test files and, in RECOGNITION mode, loads the
 * previously stored training data right away.
 */
public SupportVectorMachine(Context context, int method) {
    this.method = method;
    preferencesHelper = new PreferencesHelper(context);
    fh = new FileHelper();
    trainingFile = fh.createSvmTrainingFile();
    predictionFile = fh.createSvmPredictionFile();
    testFile = fh.createSvmTestFile();
    trainingList = new ArrayList<>();
    testList = new ArrayList<>();
    labelMap = new OneToOneMap<>();
    labelMapTest = new OneToOneMap<>();
    if (method == RECOGNITION) {
        loadFromFile();
    }
}
 
Example #8
Source File: Eigenfaces.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 5 votes vote down vote up
/**
 * Writes the collected test data (test matrix, test label list and test
 * label map) to the Eigenfaces data folder as "testlist.xml".
 */
@Override
public void saveTestData() {
    FileHelper helper = new FileHelper();
    helper.saveIntegerList(labelListTest, helper.createLabelFile(helper.EIGENFACES_PATH, "test"));
    helper.saveLabelMapToFile(helper.EIGENFACES_PATH, labelMapTest, "test");
    List<MatName> mats = new ArrayList<>();
    mats.add(new MatName("TestList", testList));
    helper.saveMatListToXml(mats, helper.EIGENFACES_PATH, "testlist.xml");
}
 
Example #9
Source File: StudentImageCollectionActivity.java    From ml-authentication with Apache License 2.0 5 votes vote down vote up
/**
 * Stores all the buffered StudentImages to the file system and database.
 * Runs on a background thread; one StudentImageCollectionEvent row is
 * created, then one image file + StudentImage row per buffered Mat.
 */
private synchronized void storeStudentImages(){
    new Thread(new Runnable() {
        @Override
        public void run() {
            StudentImageCollectionEvent studentImageCollectionEvent = new StudentImageCollectionEvent();
            studentImageCollectionEvent.setTime(Calendar.getInstance());
            studentImageCollectionEvent.setDevice(device);
            Long studentImageCollectionEventId = studentImageCollectionEventDao.insert(studentImageCollectionEvent);
            // The target folder depends only on the device and the event id,
            // so create the helper and the directory once, not per image.
            FileHelper fileHelper = new FileHelper();
            String wholeFolderPath = StudentHelper.getStudentImageDirectory() + "/" + device.getDeviceId() + "/" + Long.toString(studentImageCollectionEventId);
            if (!new File(wholeFolderPath).mkdirs()) {
                // mkdirs() returning false is unexpected here (the event id is new);
                // log it instead of silently ignoring the result.
                Log.w(getClass().getName(), "Directory could not be created (or already existed): " + wholeFolderPath);
            }
            for(int i=0; i<studentImages.size(); i++){
                MatName matName = new MatName(Integer.toString(i), studentImages.get(i));
                fileHelper.saveMatToImage(matName, wholeFolderPath + "/");

                String imageUrl = wholeFolderPath + "/" + Integer.toString(i) + ".png";
                StudentImage studentImage = new StudentImage();
                studentImage.setTimeCollected(Calendar.getInstance());
                studentImage.setImageFileUrl(imageUrl);
                studentImage.setStudentImageCollectionEvent(studentImageCollectionEvent);
                studentImageDao.insert(studentImage);
            }
            Log.i(getClass().getName(), "storeStudentImages has finished successfully.");

            // Initiate background job for face recognition training
            BootReceiver.scheduleFaceRecognitionTranining(getApplicationContext());

        }
    }).start();
}
 
Example #10
Source File: AddPersonPreviewActivity.java    From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 4 votes vote down vote up
/**
 * Initialises the capture screen: reads the target folder/name/method from
 * the launching intent, wires up the manual capture button when needed, and
 * configures the camera preview from the shared preferences.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_add_person_preview);

    Intent intent = getIntent();
    folder = intent.getStringExtra("Folder");
    if(folder.equals("Test")){
        subfolder = intent.getStringExtra("Subfolder");
    }
    name = intent.getStringExtra("Name");
    method = intent.getIntExtra("Method", 0);
    capturePressed = false;
    if(method == MANUALLY){
        btn_Capture = (ImageButton)findViewById(R.id.btn_Capture);
        btn_Capture.setVisibility(View.VISIBLE);
        btn_Capture.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                capturePressed = true;
            }
        });
    }

    fh = new FileHelper();
    total = 0;
    lastTime = new Date().getTime();

    // The default SharedPreferences are the same object for the application
    // context and the activity, so a single lookup suffices (the original
    // fetched them twice). parseInt avoids needless Integer boxing.
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
    timerDiff = Integer.parseInt(sharedPref.getString("key_timerDiff", "500"));

    mAddPersonView = (CustomCameraView) findViewById(R.id.AddPersonPreview);
    // Use camera which is selected in settings
    front_camera = sharedPref.getBoolean("key_front_camera", true);

    numberOfPictures = Integer.parseInt(sharedPref.getString("key_numberOfPictures", "100"));

    night_portrait = sharedPref.getBoolean("key_night_portrait", false);
    exposure_compensation = Integer.parseInt(sharedPref.getString("key_exposure_compensation", "50"));

    if (front_camera){
        mAddPersonView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
    } else {
        mAddPersonView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_BACK);
    }
    mAddPersonView.setVisibility(SurfaceView.VISIBLE);
    mAddPersonView.setCvCameraViewListener(this);

    int maxCameraViewWidth = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_width", "640"));
    int maxCameraViewHeight = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_height", "480"));
    mAddPersonView.setMaxFrameSize(maxCameraViewWidth, maxCameraViewHeight);
}
 
Example #11
Source File: DetectionTestActivity.java    From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 4 votes vote down vote up
/**
 * Runs the face-detection test set on a background thread. Every image below
 * the detection test folders is pre-processed and detection is attempted;
 * annotated copies are saved, a result row per image is collected, and the
 * overall accuracy plus per-image duration is written to the results file
 * before navigating back to the MainActivity.
 */
@Override
protected void onResume() {
    super.onResume();
    final Handler handler = new Handler(Looper.getMainLooper());
    thread = new Thread(new Runnable() {
        public void run() {
            if(!Thread.currentThread().isInterrupted()){
                PreProcessorFactory ppF = new PreProcessorFactory(getApplicationContext());

                FileHelper fileHelper = new FileHelper();
                File[] detectionFolders = fileHelper.getDetectionTestList();
                if (detectionFolders.length > 0) {
                    // total and matches are used to calculate the accuracy afterwards
                    int total = 0;
                    int matches = 0;
                    List<String> results = new ArrayList<>();
                    results.add("Expected Name;Expected File;Result");
                    Date time_start = new Date();
                    for (File folder : detectionFolders) {
                        File[] files = folder.listFiles();
                        int counter = 1;
                        for (File file : files) {
                            if (FileHelper.isFileAnImage(file)) {
                                Mat imgRgba = Imgcodecs.imread(file.getAbsolutePath());
                                Imgproc.cvtColor(imgRgba, imgRgba, Imgproc.COLOR_BGRA2RGBA);

                                List<Mat> images = ppF.getProcessedImage(imgRgba, PreProcessorFactory.PreprocessingMode.DETECTION);
                                Rect[] faces = ppF.getFacesForRecognition();

                                String result = "";

                                if (faces == null || faces.length == 0) {
                                    result = RESULT_NEGATIVE;
                                } else {
                                    result = RESULT_POSITIVE;
                                    faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                                    for(int i = 0; i<faces.length; i++){
                                        MatOperation.drawRectangleAndLabelOnPreview(images.get(0), faces[i], "", false);
                                    }
                                }

                                // Save images
                                String[] tokens = file.getName().split("\\.");
                                String filename = tokens[0];
                                for (int i=0; i<images.size();i++){
                                    MatName m = new MatName(filename + "_" + (i + 1), images.get(i));
                                    fileHelper.saveMatToImage(m, FileHelper.RESULTS_PATH + "/" + time_start.toString() + "/");
                                }

                                tokens = file.getParent().split("/");
                                final String name = tokens[tokens.length - 1];

                                results.add(name + ";" + file.getName() + ";" + result);

                                total++;

                                if (name.equals(result)) {
                                    matches++;
                                }
                                // Update screen to show the progress
                                final int counterPost = counter;
                                final int filesLength = files.length;
                                progress.post(new Runnable() {
                                    @Override
                                    public void run() {
                                        progress.append("Image " + counterPost + " of " + filesLength + " from " + name + "\n");
                                    }
                                });
                                counter++;
                            }
                        }
                    }
                    Date time_end = new Date();
                    long duration = time_end.getTime() - time_start.getTime();
                    // Bug fix: guard against division by zero when the folders
                    // contained no image files (total == 0). Also divide before
                    // casting — the original "(int) duration / total" truncated
                    // the long duration to int before dividing.
                    int durationPerImage = total > 0 ? (int) (duration / total) : 0;
                    double accuracy = total > 0 ? (double) matches / (double) total : 0.0;
                    Map<String, ?> printMap = PreferenceManager.getDefaultSharedPreferences(getApplicationContext()).getAll();
                    fileHelper.saveResultsToFile(printMap, accuracy, durationPerImage, results);

                    final Intent intent = new Intent(getApplicationContext(), MainActivity.class);
                    intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                    intent.putExtra("accuracy", accuracy);
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            startActivity(intent);
                        }
                    });
                }
            } else {
                Thread.currentThread().interrupt();
            }
        }
    });
    thread.start();
}
 
Example #12
Source File: TrainingActivity.java    From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 4 votes vote down vote up
/**
 * Imports all training images on a background thread: each person folder is
 * walked, every image is pre-processed, and only images containing exactly
 * one face are added to the selected recognition algorithm, which is then
 * trained before navigating back to the MainActivity.
 */
@Override
public void onResume()
{
    super.onResume();

    final Handler handler = new Handler(Looper.getMainLooper());
    thread = new Thread(new Runnable() {
        public void run() {
            if(!Thread.currentThread().isInterrupted()){
                PreProcessorFactory ppF = new PreProcessorFactory(getApplicationContext());
                PreferencesHelper preferencesHelper = new PreferencesHelper(getApplicationContext());
                String algorithm = preferencesHelper.getClassificationMethod();

                FileHelper fileHelper = new FileHelper();
                fileHelper.createDataFolderIfNotExsiting();
                final File[] persons = fileHelper.getTrainingList();
                if (persons.length > 0) {
                    Recognition rec = RecognitionFactory.getRecognitionAlgorithm(getApplicationContext(), Recognition.TRAINING, algorithm);
                    for (File person : persons) {
                        if (person.isDirectory()){
                            File[] files = person.listFiles();
                            int counter = 1;
                            for (File file : files) {
                                if (FileHelper.isFileAnImage(file)){
                                    Mat imgRgb = Imgcodecs.imread(file.getAbsolutePath());
                                    Imgproc.cvtColor(imgRgb, imgRgb, Imgproc.COLOR_BGRA2RGBA);
                                    Mat processedImage = new Mat();
                                    imgRgb.copyTo(processedImage);
                                    List<Mat> images = ppF.getProcessedImage(processedImage, PreProcessorFactory.PreprocessingMode.RECOGNITION);
                                    // Bug fix: the original test (size() > 1) let an empty
                                    // list through and images.get(0) then threw an
                                    // IndexOutOfBoundsException. Exactly one face is required.
                                    if (images == null || images.size() != 1) {
                                        // No face or more than 1 face detected --> cannot use this file for training
                                        continue;
                                    } else {
                                        processedImage = images.get(0);
                                    }
                                    if (processedImage.empty()) {
                                        continue;
                                    }
                                    // The last token is the name --> Folder name = Person name
                                    String[] tokens = file.getParent().split("/");
                                    final String name = tokens[tokens.length - 1];

                                    MatName m = new MatName("processedImage", processedImage);
                                    fileHelper.saveMatToImage(m, FileHelper.DATA_PATH);

                                    rec.addImage(processedImage, name, false);

                                    // Update screen to show the progress
                                    final int counterPost = counter;
                                    final int filesLength = files.length;
                                    progress.post(new Runnable() {
                                        @Override
                                        public void run() {
                                            progress.append("Image " + counterPost + " of " + filesLength + " from " + name + " imported.\n");
                                        }
                                    });

                                    counter++;
                                }
                            }
                        }
                    }
                    final Intent intent = new Intent(getApplicationContext(), MainActivity.class);
                    intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                    if (rec.train()) {
                        intent.putExtra("training", "Training successful");
                    } else {
                        intent.putExtra("training", "Training failed");
                    }
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            startActivity(intent);
                        }
                    });
                } else {
                    Thread.currentThread().interrupt();
                }
            }
        }
    });
    thread.start();
}
 
Example #13
Source File: SupportVectorMachine.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 4 votes vote down vote up
/**
 * Creates an SVM wrapper that operates directly on the supplied training
 * and prediction files, with an empty training list to start from.
 */
public SupportVectorMachine(File trainingFile, File predictionFile){
    this.trainingFile = trainingFile;
    this.predictionFile = predictionFile;
    this.fh = new FileHelper();
    this.trainingList = new ArrayList<>();
}