Qichuan (Sean) ZHANG (qichuan)
private fun drawGlasses(canvas: Canvas, face: FirebaseVisionFace) {
    val leftEye = face.getLandmark(FirebaseVisionFaceLandmark.LEFT_EYE)
    val rightEye = face.getLandmark(FirebaseVisionFaceLandmark.RIGHT_EYE)
    if (leftEye != null && rightEye != null) {
        val eyeDistance = leftEye.position.x - rightEye.position.x
        val delta = (widthScaleFactor * eyeDistance / 2).toInt()
        // Bounding box around both eyes in view coordinates; the original snippet is
        // truncated here, so the right/bottom edges and the bitmap draw are assumed
        val glassesRect = Rect(
            translateX(leftEye.position.x).toInt() - delta,
            translateY(leftEye.position.y).toInt() - delta,
            translateX(rightEye.position.x).toInt() + delta,
            translateY(rightEye.position.y).toInt() + delta
        )
        // glassesBitmap is an assumed field, loaded elsewhere (e.g. from resources)
        canvas.drawBitmap(glassesBitmap, null, glassesRect, null)
    }
}
/**
 * Adjusts the x coordinate from the preview's coordinate system to the view coordinate system.
 */
private fun translateX(x: Float): Float {
    // The x axis is flipped (width - x) because the front-camera preview is mirrored
    return width - scaleX(x)
}
/**
 * Adjusts the y coordinate from the preview's coordinate system to the view coordinate system.
 */
private fun translateY(y: Float): Float {
    // Body not shown in the original snippet; assumed to simply scale y (no mirroring on this axis)
    return scaleY(y)
}

widthScaleFactor = canvas.width.toFloat() / previewWidth.toFloat()
heightScaleFactor = canvas.height.toFloat() / previewHeight.toFloat()
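The translate helpers above rely on scaleX (and a matching scaleY); a minimal sketch, assuming they simply multiply the preview coordinates by the scale factors computed above:

private fun scaleX(x: Float): Float = x * widthScaleFactor

private fun scaleY(y: Float): Float = y * heightScaleFactor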
// Perform face detection
detector.detectInImage(firebaseVisionImage).addOnSuccessListener { faceList ->
    if (faceList.isNotEmpty()) {
        // We only need the first detected face
        val face = faceList[0]
        // Draw the bitmaps on the detected face (see the hand-off sketch below)
    }
}
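The gist does not show how the detected face reaches the drawing code. A minimal hand-off sketch, where faceOverlayView and its face property are assumed names for the custom view whose onDraw() ends up calling drawGlasses():

// Hypothetical overlay view (names not from the gist): store the face and trigger a redraw,
// which eventually calls drawGlasses(canvas, face) from the view's onDraw()
faceOverlayView.face = face
faceOverlayView.invalidate()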
// Build an image metadata object
val metadata = FirebaseVisionImageMetadata.Builder()
    .setFormat(FirebaseVisionImageMetadata.IMAGE_FORMAT_NV21)
    .setWidth(frame.size.width)
    .setHeight(frame.size.height)
    .setRotation(rotation)
    .build()

// Create the vision image object that will be consumed by FirebaseVisionFaceDetector
// for face detection
val firebaseVisionImage = FirebaseVisionImage.fromByteArray(frame.data, metadata)
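The rotation passed to setRotation() must be one of the FirebaseVisionImageMetadata.ROTATION_* constants. A small helper sketch, assuming the camera library reports the frame rotation in degrees:

// Maps a rotation in degrees to the corresponding FirebaseVisionImageMetadata constant
private fun degreesToFirebaseRotation(degrees: Int): Int = when (degrees) {
    0 -> FirebaseVisionImageMetadata.ROTATION_0
    90 -> FirebaseVisionImageMetadata.ROTATION_90
    180 -> FirebaseVisionImageMetadata.ROTATION_180
    270 -> FirebaseVisionImageMetadata.ROTATION_270
    else -> throw IllegalArgumentException("Unsupported rotation: $degrees")
}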
// Initialize the face detection options; we need all the face landmarks
private val options = FirebaseVisionFaceDetectorOptions.Builder()
    .setLandmarkType(FirebaseVisionFaceDetectorOptions.ALL_LANDMARKS)
    .build()

// Obtain the FirebaseVisionFaceDetector instance (options must be declared before it)
private val detector = FirebaseVision.getInstance().getVisionFaceDetector(options)
cameraView.addFrameProcessor { frame ->
    // Process each preview frame here (see the sketch below)
}
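Inside the frame processor the preview size has to be captured for the overlay's scale factors before the frame is converted and handed to the detector. A sketch under those assumptions (previewWidth and previewHeight are the fields used by the scale-factor code above):

cameraView.addFrameProcessor { frame ->
    // Remember the preview size so the overlay can compute widthScaleFactor / heightScaleFactor
    previewWidth = frame.size.width
    previewHeight = frame.size.height
    // Build the metadata and FirebaseVisionImage from frame.data, then call
    // detector.detectInImage(...) as shown in the snippets above
}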
lifecycle.addObserver(MainActivityLifecycleObserver(camera_view))
// MainActivityLifecycleObserver.kt
class MainActivityLifecycleObserver(private val cameraView: CameraView) : LifecycleObserver {

    @OnLifecycleEvent(Lifecycle.Event.ON_RESUME)
    fun startCamera() {
        cameraView.start()
    }
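    // Not in the original gist: assumed ON_PAUSE counterpart so the camera is released
    // when the activity goes to the background, using the matching CameraView.stop() call
    @OnLifecycleEvent(Lifecycle.Event.ON_PAUSE)
    fun stopCamera() {
        cameraView.stop()
    }
}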
override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<out String>, grantResults: IntArray) {
    if (requestCode == PERMISION_REQUEST_CODE) {
        // Guard against an empty result (the request may have been interrupted)
        if (grantResults.isNotEmpty() &&
            android.Manifest.permission.CAMERA == permissions[0] &&
            grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            startFaceProcessor()
        }
    }
    super.onRequestPermissionsResult(requestCode, permissions, grantResults)
}
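The matching permission request is not shown in the gist; a minimal sketch using the standard ActivityCompat API (requestCameraPermissionIfNeeded is a hypothetical helper, while startFaceProcessor and PERMISION_REQUEST_CODE are the names used above):

// Hypothetical helper, not in the original gist
private fun requestCameraPermissionIfNeeded() {
    if (ContextCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(
            this, arrayOf(android.Manifest.permission.CAMERA), PERMISION_REQUEST_CODE)
    } else {
        startFaceProcessor()
    }
}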