Обнаружение лиц (OpenCV / ML Kit) по локально сохранённым изображениям в Android

#java #android #opencv #image-processing

#java #Android #opencv #обработка изображений

Вопрос:

открытый класс MainActivity расширяет AppCompatActivity {

 private static final String TAG = "MainActivity";
Button facedetect;
GraphicOverlay graphicOverlay;
CameraView cameraView;
AlertDialog alertDialog;

Bitmap captureImage, saveImage;
Bitmap b1,b2;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    facedetect = findViewById(R.id.detect_face_btn);
    graphicOverlay = findViewById(R.id.graphic_overlay);
    cameraView = findViewById(R.id.camera_view);


    facedetect.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            cameraView.start();
            cameraView.captureImage();
            graphicOverlay.clear();
        }
    });

    cameraView.addCameraKitListener(new CameraKitEventListenerAdapter() {
        @Override
        public void onEvent(CameraKitEvent event) {
            super.onEvent(event);
        }

        @Override
        public void onError(CameraKitError error) {
            super.onError(error);
        }

        @Override
        public void onImage(CameraKitImage image) {
            super.onImage(image);
            alertDialog.show();

            b1 = image.getBitmap();
            b1 = Bitmap.createScaledBitmap(b1, cameraView.getWidth(), cameraView.getHeight(), false);
            cameraView.stop();

            getImageFromLocal();`enter code here`
 

// если ( compareImages(b1, b2)==true) {
// Toast.makeText(getApplicationContext(),»True», Toast.LENGTH_SHORT).show();
// }
// Toast.makeText(getApplicationContext(), «False», Toast.LENGTH_SHORT).show();

             processFaceDetaection(b1);
        }

        @Override
        public void onVideo(CameraKitVideo video) {
            super.onVideo(video);
        }
    });

}


private void processFaceDetaection(Bitmap bitmap) {

    FirebaseVisionImage firebaseVisionImage = FirebaseVisionImage.fromBitmap(bitmap);
    FirebaseVisionFaceDetectorOptions firebaseVisionFaceDetectorOptions = new FirebaseVisionFaceDetectorOptions.Builder().build();

    // High-accuracy landmark detection and face classification
    FirebaseVisionFaceDetectorOptions landmarkdetectionfacedetection =
            new FirebaseVisionFaceDetectorOptions.Builder()
                    .setPerformanceMode(FirebaseVisionFaceDetectorOptions.ACCURATE)
                    .setLandmarkMode(FirebaseVisionFaceDetectorOptions.ALL_LANDMARKS)
                    .setClassificationMode(FirebaseVisionFaceDetectorOptions.ALL_CLASSIFICATIONS)
                    .build();

    // Real-time contour detection of multiple faces
    FirebaseVisionFaceDetectorOptions contourdetectionfacedetection =
            new FirebaseVisionFaceDetectorOptions.Builder()
                    .setContourMode(FirebaseVisionFaceDetectorOptions.ALL_CONTOURS)
                    .build();


    FirebaseVisionFaceDetector firebaseVisionFaceDetector = FirebaseVision.getInstance().getVisionFaceDetector(firebaseVisionFaceDetectorOptions);
 

// FirebaseVisionFaceDetector firebaseVisionFaceDetector = FirebaseVision.getInstance().getVisionFaceDetector(contourdetectionfacedetection);

     firebaseVisionFaceDetector.detectInImage(firebaseVisionImage).addOnSuccessListener(new OnSuccessListener<List<FirebaseVisionFace>>() {
        @Override
        public void onSuccess(List<FirebaseVisionFace> firebaseVisionFaces) {
            getFaceResult(firebaseVisionFaces);
            compareFaceFromLocal();
        }
    }).addOnFailureListener(new OnFailureListener() {
        @Override
        public void onFailure(@NonNull Exception e) {
            alertDialog.dismiss();
            Toast.makeText(MainActivity.this, "Error : "   e.getMessage(), Toast.LENGTH_SHORT).show();
        }
    });

}

/**
 * Compares the camera capture (b1) with the locally picked image (b2) and
 * toasts the outcome.
 *
 * Bug fix: the original showed the "False" toast unconditionally, so even a
 * successful match immediately displayed "False" as well — an else branch is
 * required.
 */
private void compareFaceFromLocal() {

    if (compareImages(b1, b2)) {
        Toast.makeText(getApplicationContext(), "True", Toast.LENGTH_SHORT).show();
    } else {
        Toast.makeText(getApplicationContext(), "False", Toast.LENGTH_SHORT).show();
    }
}


private void getFaceResult(List<FirebaseVisionFace> firebaseVisionFaces) {

    int counter = 0;
    for (FirebaseVisionFace face : firebaseVisionFaces) {
        Rect rect = face.getBoundingBox();
        ReactOverlay reactOverlay = new ReactOverlay(graphicOverlay, rect);
        graphicOverlay.add(reactOverlay);

        counter = counter   1;
    }

    Toast.makeText(MainActivity.this, "No. of faces detected : "   counter, Toast.LENGTH_SHORT).show();

    alertDialog.dismiss();
}


// Maps the display rotation (Surface.ROTATION_*) to the clockwise angle a
// captured image must be rotated for a camera sensor mounted at 90 degrees.
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

static {
    ORIENTATIONS.put(Surface.ROTATION_0, 90);
    ORIENTATIONS.put(Surface.ROTATION_90, 0);
    ORIENTATIONS.put(Surface.ROTATION_180, 270);
    ORIENTATIONS.put(Surface.ROTATION_270, 180);
}

/**
 * Get the angle by which an image must be rotated given the device's current
 * orientation.
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private int getRotationCompensation(String cameraId, Activity activity, Context context)
        throws CameraAccessException {
    // Get the device's current rotation relative to its "native" orientation.
    // Then, from the ORIENTATIONS table, look up the angle the image must be
    // rotated to compensate for the device's rotation.
    int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    int rotationCompensation = ORIENTATIONS.get(deviceRotation);
    // On most devices, the sensor orientation is 90 degrees, but for some
    // devices it is 270 degrees. For devices with a sensor orientation of
    // 270, rotate the image an additional 180 ((270   270) % 360) degrees.
    CameraManager cameraManager = (CameraManager) context.getSystemService(CAMERA_SERVICE);
    int sensorOrientation = cameraManager
            .getCameraCharacteristics(cameraId)
            .get(CameraCharacteristics.SENSOR_ORIENTATION);
    rotationCompensation = (rotationCompensation   sensorOrientation   270) % 360;
    // Return the corresponding FirebaseVisionImageMetadata rotation value.
    int resu<
    switch (rotationCompensation) {
        case 0:
            result = FirebaseVisionImageMetadata.ROTATION_0;
            break;
        case 90:
            result = FirebaseVisionImageMetadata.ROTATION_90;
            break;
        case 180:
            result = FirebaseVisionImageMetadata.ROTATION_180;
            break;
        case 270:
            result = FirebaseVisionImageMetadata.ROTATION_270;
            break;
        default:
            result = FirebaseVisionImageMetadata.ROTATION_0;
            Log.e(TAG, "Bad rotation value: "   rotationCompensation);
    }
    return resu<
}

@Override
protected void onPause() {
    super.onPause();

    // Release the camera while the activity is not in the foreground.
    cameraView.stop();
}


@Override
protected void onResume() {
    super.onResume();

    // Restart the camera preview when the activity returns to the foreground.
    cameraView.start();
}


/**
 * Opens the system content picker for an image; the chosen Uri is delivered to
 * onActivityResult with request code 0.
 */
private void getImageFromLocal() {
    Intent pickIntent = new Intent(Intent.ACTION_GET_CONTENT);
    pickIntent.setType("image/*");
    startActivityForResult(pickIntent, 0);
}




/*Compare two images.
 * @param bitmap1
 * @param bitmap2
 * @return true iff both images have the same dimensions and pixel values.*/

public static boolean compareImages(Bitmap bitmap1, Bitmap bitmap2) {
    if (bitmap1.getWidth() != bitmap2.getWidth() ||
            bitmap1.getHeight() != bitmap2.getHeight()) {
        return false;
    }

    for (int y = 0; y < bitmap1.getHeight(); y  ) {
        for (int x = 0; x < bitmap1.getWidth(); x  ) {
            if (bitmap1.getPixel(x, y) != bitmap2.getPixel(x, y)) {
                return false;
            }
        }
    }

    return true;
}


// NOTE(review): public static mutable field; appears unused in the visible
// code — TODO confirm before removing. Prefer private instance state.
public static Uri imageURI;

/**
 * Receives the image picked by getImageFromLocal() (request code 0) and loads
 * it into b2 for the face comparison.
 */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {

    super.onActivityResult(requestCode, resultCode, data);

    if (resultCode == Activity.RESULT_OK) {

        if (requestCode == 0 && data != null) {
            try {
                b2 = MediaStore.Images.Media.getBitmap(
                        getApplicationContext().getContentResolver(), data.getData());
            } catch (IOException e) {
                // Log instead of printStackTrace so the failure is visible in logcat.
                Log.e(TAG, "Failed to load picked image", e);
            }
        }

    } else {
        // Bug fix: the original called System.exit(0) BEFORE Log.e, so the log
        // statement was dead code — and killing the whole process because the
        // user cancelled the picker is wrong. Just record the cancellation.
        Log.e("result", "BAD");
    }
}
 

}

Комментарии:

1. Можете ли вы объяснить проблему, с которой вы столкнулись?

2. Я не могу сравнить два изображения: обнаруженное лицо и локально сохранённое изображение имеют разные форматы, как видно из кода. Поэтому сравнение лиц всегда возвращает false.